Oct 07 00:11:12 crc systemd[1]: Starting Kubernetes Kubelet... Oct 07 00:11:12 crc restorecon[4678]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc 
restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 07 00:11:12 crc 
restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc 
restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc 
restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 
crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 
00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 
00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 00:11:12 crc 
restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 00:11:12 crc restorecon[4678]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:12 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 
00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 
00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc 
restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 00:11:13 crc restorecon[4678]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 07 00:11:13 crc kubenswrapper[4791]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 07 00:11:13 crc kubenswrapper[4791]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 07 00:11:13 crc kubenswrapper[4791]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 07 00:11:13 crc kubenswrapper[4791]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 07 00:11:13 crc kubenswrapper[4791]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 07 00:11:13 crc kubenswrapper[4791]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.795434 4791 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799582 4791 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799635 4791 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799643 4791 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799650 4791 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799658 4791 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799664 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799670 4791 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799675 4791 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799681 4791 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799686 4791 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799691 4791 feature_gate.go:330] unrecognized feature gate: Example Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799698 4791 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799705 4791 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799712 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799719 4791 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799725 4791 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799730 4791 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799735 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799741 4791 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799747 4791 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799752 4791 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 07 
00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799757 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799762 4791 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799767 4791 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799772 4791 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799777 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799784 4791 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799789 4791 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799794 4791 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799799 4791 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799805 4791 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799811 4791 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799816 4791 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799822 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799827 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799833 4791 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799839 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799844 4791 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799850 4791 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799855 4791 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799861 4791 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799866 4791 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799872 4791 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799877 4791 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799882 4791 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799888 4791 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799894 
4791 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799899 4791 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799906 4791 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799913 4791 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799921 4791 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799928 4791 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799933 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799938 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799945 4791 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799950 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799957 4791 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799963 4791 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799969 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799974 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799979 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799985 4791 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799990 4791 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.799998 4791 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.800006 4791 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.800011 4791 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.800017 4791 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.800023 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.800028 4791 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.800033 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.800038 4791 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800730 4791 flags.go:64] FLAG: --address="0.0.0.0" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800748 4791 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800759 4791 flags.go:64] FLAG: --anonymous-auth="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800767 4791 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800775 4791 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800783 4791 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800791 4791 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800799 4791 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800805 4791 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800811 4791 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800818 4791 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800825 4791 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800831 4791 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800837 4791 flags.go:64] FLAG: --cgroup-root="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800842 4791 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800848 4791 flags.go:64] FLAG: --client-ca-file="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800855 4791 flags.go:64] FLAG: --cloud-config="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800860 4791 flags.go:64] FLAG: --cloud-provider="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800866 4791 flags.go:64] FLAG: --cluster-dns="[]" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800872 4791 flags.go:64] FLAG: --cluster-domain="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800878 4791 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800884 4791 flags.go:64] FLAG: 
--config-dir="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800890 4791 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800896 4791 flags.go:64] FLAG: --container-log-max-files="5" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800905 4791 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800911 4791 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.800917 4791 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.801056 4791 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802540 4791 flags.go:64] FLAG: --contention-profiling="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802618 4791 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802625 4791 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802635 4791 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802643 4791 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802666 4791 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802672 4791 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802678 4791 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802683 4791 flags.go:64] FLAG: --enable-load-reader="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802709 4791 flags.go:64] FLAG: --enable-server="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802714 4791 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802729 4791 flags.go:64] FLAG: --event-burst="100" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802737 4791 flags.go:64] FLAG: --event-qps="50" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802742 4791 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802747 4791 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802752 4791 flags.go:64] FLAG: --eviction-hard="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802759 4791 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802767 4791 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802772 4791 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802778 4791 flags.go:64] FLAG: --eviction-soft="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802782 4791 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802787 4791 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802792 4791 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 07 00:11:13 crc 
kubenswrapper[4791]: I1007 00:11:13.802797 4791 flags.go:64] FLAG: --experimental-mounter-path="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802802 4791 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802807 4791 flags.go:64] FLAG: --fail-swap-on="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802814 4791 flags.go:64] FLAG: --feature-gates="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802821 4791 flags.go:64] FLAG: --file-check-frequency="20s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802826 4791 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802832 4791 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802837 4791 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802844 4791 flags.go:64] FLAG: --healthz-port="10248" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802848 4791 flags.go:64] FLAG: --help="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802855 4791 flags.go:64] FLAG: --hostname-override="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802865 4791 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802870 4791 flags.go:64] FLAG: --http-check-frequency="20s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802877 4791 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802884 4791 flags.go:64] FLAG: --image-credential-provider-config="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802890 4791 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802895 4791 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802929 4791 flags.go:64] FLAG: --image-service-endpoint="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.802934 4791 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803260 4791 flags.go:64] FLAG: --kube-api-burst="100" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803279 4791 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803288 4791 flags.go:64] FLAG: --kube-api-qps="50" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803303 4791 flags.go:64] FLAG: --kube-reserved="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803311 4791 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803316 4791 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803322 4791 flags.go:64] FLAG: --kubelet-cgroups="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803329 4791 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803342 4791 flags.go:64] FLAG: --lock-file="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803353 4791 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803363 4791 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803372 4791 flags.go:64] 
FLAG: --log-json-info-buffer-size="0" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803387 4791 flags.go:64] FLAG: --log-json-split-stream="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803394 4791 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803427 4791 flags.go:64] FLAG: --log-text-split-stream="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803434 4791 flags.go:64] FLAG: --logging-format="text" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803440 4791 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803446 4791 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803453 4791 flags.go:64] FLAG: --manifest-url="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803459 4791 flags.go:64] FLAG: --manifest-url-header="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803470 4791 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803477 4791 flags.go:64] FLAG: --max-open-files="1000000" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803484 4791 flags.go:64] FLAG: --max-pods="110" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803490 4791 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803496 4791 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803501 4791 flags.go:64] FLAG: --memory-manager-policy="None" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803507 4791 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803512 4791 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803517 4791 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803523 4791 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803548 4791 flags.go:64] FLAG: --node-status-max-images="50" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803554 4791 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803561 4791 flags.go:64] FLAG: --oom-score-adj="-999" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803566 4791 flags.go:64] FLAG: --pod-cidr="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803571 4791 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803580 4791 flags.go:64] FLAG: --pod-manifest-path="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803586 4791 flags.go:64] FLAG: --pod-max-pids="-1" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803591 4791 flags.go:64] FLAG: --pods-per-core="0" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803596 4791 flags.go:64] FLAG: --port="10250" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803602 4791 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 
00:11:13.803607 4791 flags.go:64] FLAG: --provider-id="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803612 4791 flags.go:64] FLAG: --qos-reserved="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803618 4791 flags.go:64] FLAG: --read-only-port="10255" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803623 4791 flags.go:64] FLAG: --register-node="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803628 4791 flags.go:64] FLAG: --register-schedulable="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803633 4791 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803647 4791 flags.go:64] FLAG: --registry-burst="10" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803652 4791 flags.go:64] FLAG: --registry-qps="5" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803657 4791 flags.go:64] FLAG: --reserved-cpus="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803662 4791 flags.go:64] FLAG: --reserved-memory="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803668 4791 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803674 4791 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803679 4791 flags.go:64] FLAG: --rotate-certificates="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803684 4791 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803689 4791 flags.go:64] FLAG: --runonce="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803694 4791 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803699 4791 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803705 4791 flags.go:64] FLAG: --seccomp-default="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803710 4791 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803715 4791 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803722 4791 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803727 4791 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803733 4791 flags.go:64] FLAG: --storage-driver-password="root" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803739 4791 flags.go:64] FLAG: --storage-driver-secure="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803744 4791 flags.go:64] FLAG: --storage-driver-table="stats" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803750 4791 flags.go:64] FLAG: --storage-driver-user="root" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803755 4791 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803760 4791 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803766 4791 flags.go:64] FLAG: --system-cgroups="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803772 4791 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 
00:11:13.803783 4791 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803788 4791 flags.go:64] FLAG: --tls-cert-file="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803795 4791 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803806 4791 flags.go:64] FLAG: --tls-min-version="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803811 4791 flags.go:64] FLAG: --tls-private-key-file="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803817 4791 flags.go:64] FLAG: --topology-manager-policy="none" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803822 4791 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803828 4791 flags.go:64] FLAG: --topology-manager-scope="container" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803833 4791 flags.go:64] FLAG: --v="2" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803859 4791 flags.go:64] FLAG: --version="false" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803869 4791 flags.go:64] FLAG: --vmodule="" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803877 4791 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.803883 4791 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804097 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804105 4791 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804110 4791 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804115 4791 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804120 4791 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804125 4791 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804130 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804135 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804140 4791 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804145 4791 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804151 4791 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804156 4791 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804164 4791 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804171 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804177 4791 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804183 4791 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804189 4791 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804194 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804199 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804204 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804208 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804213 4791 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804218 4791 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804248 4791 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804253 4791 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804258 4791 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804262 4791 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804267 4791 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804271 4791 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804276 4791 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804280 4791 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804287 4791 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804293 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804299 4791 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804304 4791 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804308 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804313 4791 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804317 4791 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804322 4791 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804326 4791 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804330 4791 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804335 4791 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804341 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804346 4791 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804355 4791 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804361 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804366 4791 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804372 4791 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804379 4791 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804388 4791 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804394 4791 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804420 4791 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804425 4791 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804429 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804434 4791 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804440 4791 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804445 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804450 4791 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804455 4791 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804460 4791 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804465 4791 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804470 4791 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804474 4791 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804480 4791 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804485 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804491 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804495 4791 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804500 4791 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804505 4791 feature_gate.go:330] unrecognized feature gate: Example Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804512 4791 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.804518 4791 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.804537 4791 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.819601 4791 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.819672 4791 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819802 4791 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819830 4791 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819844 4791 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819859 4791 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819871 4791 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819881 4791 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819890 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819898 4791 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819906 4791 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819915 4791 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819924 4791 feature_gate.go:330] unrecognized feature gate: Example Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819933 4791 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819942 4791 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819950 4791 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819958 4791 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819965 4791 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819973 4791 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819981 4791 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819989 4791 feature_gate.go:330] unrecognized feature gate: 
ClusterAPIInstallIBMCloud Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.819997 4791 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820008 4791 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820021 4791 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820031 4791 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820041 4791 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820049 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820057 4791 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820065 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820073 4791 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820081 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820088 4791 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820096 4791 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820106 4791 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820118 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820129 4791 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820142 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820152 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820163 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820172 4791 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820182 4791 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820193 4791 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820204 4791 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820252 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820265 4791 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820276 4791 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820286 4791 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820298 4791 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820308 4791 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820322 4791 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820335 4791 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820347 4791 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820357 4791 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820367 4791 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820377 4791 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820387 4791 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820398 4791 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820450 4791 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820461 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820474 4791 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820486 4791 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820499 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820510 4791 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820522 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820530 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820538 4791 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820547 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820558 4791 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820568 4791 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820577 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820584 4791 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820592 4791 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820604 4791 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.820618 4791 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false 
ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820853 4791 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820868 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820879 4791 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820888 4791 feature_gate.go:330] unrecognized feature gate: Example Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820896 4791 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820905 4791 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820914 4791 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820924 4791 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820933 4791 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820942 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820951 4791 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820959 4791 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820968 4791 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820977 4791 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820986 4791 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.820995 4791 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821003 4791 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821010 4791 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821018 4791 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821026 4791 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821034 4791 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821042 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821049 4791 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821058 4791 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 07 00:11:13 crc kubenswrapper[4791]: 
W1007 00:11:13.821066 4791 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821079 4791 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821089 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821098 4791 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821106 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821114 4791 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821123 4791 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821131 4791 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821139 4791 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821147 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821157 4791 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821166 4791 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821174 4791 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821183 4791 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821191 4791 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821200 4791 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821208 4791 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821216 4791 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821225 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821233 4791 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821241 4791 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821249 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821256 4791 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821264 4791 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821271 4791 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821279 4791 feature_gate.go:330] 
unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821288 4791 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821300 4791 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821308 4791 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821315 4791 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821326 4791 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821337 4791 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821345 4791 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821354 4791 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821362 4791 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821371 4791 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821380 4791 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821388 4791 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821396 4791 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821436 4791 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821447 4791 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821458 4791 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821468 4791 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821477 4791 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821486 4791 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821496 4791 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.821510 4791 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.821526 4791 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.823619 4791 server.go:940] "Client rotation is on, will bootstrap in background" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.831113 4791 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.831276 4791 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.833996 4791 server.go:997] "Starting client certificate rotation" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.834048 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.835388 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-19 19:19:25.125014601 +0000 UTC Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.835489 4791 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1051h8m11.28952749s for next certificate rotation Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.861429 4791 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.864360 4791 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.888742 4791 log.go:25] "Validated CRI v1 runtime API" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.920164 4791 log.go:25] "Validated CRI v1 image API" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.922328 4791 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.928755 4791 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-07-00-01-07-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.928819 4791 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.961386 4791 manager.go:217] Machine: {Timestamp:2025-10-07 00:11:13.958058673 +0000 UTC m=+0.553996394 
CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:30c6043d-b881-47c8-9ee1-3608625d7a75 BootID:900efbc9-0c53-4754-b218-ee742f01afae Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:09:c2:e6 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:09:c2:e6 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:57:7e:ce Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:88:10:c7 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:78:b2:ab Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:33:db:7b Speed:-1 Mtu:1496} {Name:eth10 MacAddress:7a:bb:47:2a:16:4a Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:92:3e:8d:fb:6c:4e Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 
Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.962032 4791 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.962650 4791 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.963189 4791 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.963525 4791 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.963584 4791 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" 
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.964464 4791 topology_manager.go:138] "Creating topology manager with none policy" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.964498 4791 container_manager_linux.go:303] "Creating device plugin manager" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.965010 4791 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.965044 4791 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.965948 4791 state_mem.go:36] "Initialized new in-memory state store" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.966101 4791 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.969968 4791 kubelet.go:418] "Attempting to sync node with API server" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.970023 4791 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.970073 4791 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.970107 4791 kubelet.go:324] "Adding apiserver pod source" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.970134 4791 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.974769 4791 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.976831 4791 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.978366 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:13 crc kubenswrapper[4791]: E1007 00:11:13.978518 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.978465 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:13 crc kubenswrapper[4791]: E1007 00:11:13.978626 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.979219 4791 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.980952 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.980993 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981004 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981013 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981030 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981043 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981056 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981073 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981105 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981117 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981139 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981149 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.981177 4791 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 
00:11:13.981904 4791 server.go:1280] "Started kubelet" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.983060 4791 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.983095 4791 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.983198 4791 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:13 crc systemd[1]: Started Kubernetes Kubelet. Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.984488 4791 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.985779 4791 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.985817 4791 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.988858 4791 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.989359 4791 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 07 00:11:13 crc kubenswrapper[4791]: E1007 00:11:13.989280 4791 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.989875 4791 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.985958 4791 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 20:22:33.936473263 +0000 UTC Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.990490 4791 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2012h11m19.945997624s for next certificate rotation Oct 07 00:11:13 crc kubenswrapper[4791]: W1007 00:11:13.992393 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:13 crc kubenswrapper[4791]: E1007 00:11:13.992519 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.992455 4791 factory.go:55] Registering systemd factory Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.992872 4791 factory.go:221] Registration of the systemd container factory successfully Oct 07 00:11:13 crc kubenswrapper[4791]: E1007 00:11:13.993222 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="200ms" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 
00:11:13.995983 4791 server.go:460] "Adding debug handlers to kubelet server" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.996344 4791 factory.go:153] Registering CRI-O factory Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.996420 4791 factory.go:221] Registration of the crio container factory successfully Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.996578 4791 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.996616 4791 factory.go:103] Registering Raw factory Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.996639 4791 manager.go:1196] Started watching for new ooms in manager Oct 07 00:11:13 crc kubenswrapper[4791]: E1007 00:11:13.996426 4791 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.217:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186c0d0609e313d5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-07 00:11:13.981862869 +0000 UTC m=+0.577800520,LastTimestamp:2025-10-07 00:11:13.981862869 +0000 UTC m=+0.577800520,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 07 00:11:13 crc kubenswrapper[4791]: I1007 00:11:13.997606 4791 manager.go:319] Starting recovery of all containers Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007216 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007297 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007321 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007341 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007361 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007381 4791 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007428 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007452 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007475 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007493 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007512 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007563 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007580 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007601 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007620 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007640 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007660 4791 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007679 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007696 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007716 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007737 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007759 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007778 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007797 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007818 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007838 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007861 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007885 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007904 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007922 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007940 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007961 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.007981 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008027 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008046 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008065 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008084 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008104 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008122 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008173 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008191 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008211 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008230 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008254 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008273 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008291 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008310 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008326 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008347 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008366 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008387 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008431 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008461 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008482 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008504 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008526 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008545 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008566 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008589 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008606 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008624 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008643 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008661 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008682 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008704 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008721 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008740 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008758 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008775 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008794 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008812 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008830 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008856 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008873 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008891 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008909 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008927 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008945 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008965 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.008984 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009016 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009036 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009055 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009075 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009093 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009111 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009130 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009149 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009167 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009187 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009205 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009222 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009241 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009259 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009277 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009295 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009316 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009334 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009353 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009375 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009392 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009565 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009588 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009625 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009653 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009674 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009696 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009719 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009740 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009759 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009780 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009800 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009823 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009844 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009863 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009882 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009910 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009929 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009947 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009966 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.009985 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.010004 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012630 4791 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012734 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012773 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012795 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012814 4791 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012831 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012854 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012871 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012889 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012911 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012928 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012948 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012966 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.012985 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013006 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013022 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013039 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013058 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013080 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013097 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013123 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013141 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013161 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013184 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013204 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.013223 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015240 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015304 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015535 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015585 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015622 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015674 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015701 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015738 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015766 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015793 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015831 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015857 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015890 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015909 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015929 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015959 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.015977 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016002 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016019 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016036 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016059 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016088 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016120 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016171 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016201 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016229 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016246 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016262 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016283 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016301 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016327 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016351 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016367 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016392 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016435 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016457 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016477 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016496 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016519 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016538 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016565 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016589 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016612 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016656 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016678 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016704 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016721 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016739 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016761 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016777 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016802 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016820 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016839 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016863 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016879 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016901 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016916 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016932 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.016951 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.017012 4791 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.017026 4791 reconstruct.go:97] "Volume reconstruction finished" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.017038 4791 reconciler.go:26] "Reconciler: start to sync state" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.018330 4791 manager.go:324] Recovery completed Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.031954 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.034799 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.034868 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.034883 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.038655 4791 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.038697 4791 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.038749 4791 state_mem.go:36] "Initialized new in-memory state store" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.051672 4791 policy_none.go:49] "None policy: Start" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.053050 4791 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.053094 4791 state_mem.go:35] "Initializing new in-memory state store" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.063065 4791 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.067318 4791 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.067375 4791 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.067796 4791 kubelet.go:2335] "Starting kubelet main sync loop" Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.067851 4791 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 07 00:11:14 crc kubenswrapper[4791]: W1007 00:11:14.068584 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.068661 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.089819 4791 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.108595 4791 manager.go:334] "Starting Device Plugin manager" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.108951 4791 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.108978 4791 server.go:79] "Starting device plugin registration server" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.109582 4791 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.109606 4791 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.109825 4791 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.109938 4791 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.109957 4791 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.116942 4791 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.168862 4791 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.168974 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.170988 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.171041 4791 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.171058 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.171265 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.171566 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.171669 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.172250 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.172305 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.172326 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.172650 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.172761 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.172810 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173214 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173252 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173266 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173653 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173682 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173694 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173809 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173846 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.173860 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.174035 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 
crc kubenswrapper[4791]: I1007 00:11:14.174169 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.174210 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.175718 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.175757 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.175771 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.175810 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.175868 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.175878 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.175890 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.176009 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.176086 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.176819 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.176843 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.176855 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.176999 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.177024 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.177417 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.177444 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.177456 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.177812 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.177861 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.177879 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.195152 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="400ms" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.210068 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.211825 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.211900 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.211919 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.211963 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.212687 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: connection refused" node="crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.219747 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.219807 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.219847 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.219934 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.219987 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220132 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220191 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220237 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220273 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220312 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220349 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220389 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220481 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220525 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.220777 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.235435 4791 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.217:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186c0d0609e313d5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-07 00:11:13.981862869 +0000 UTC m=+0.577800520,LastTimestamp:2025-10-07 00:11:13.981862869 +0000 UTC m=+0.577800520,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322574 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322667 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322713 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322746 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322791 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322822 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322852 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322883 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322906 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322922 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323018 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322976 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323072 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323087 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.322917 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323113 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323131 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323141 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323166 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323178 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323189 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323198 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323230 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 
00:11:14.323278 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323314 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323376 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323469 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323247 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323538 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.323589 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.413187 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.414933 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.415004 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.415023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.415069 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.415885 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: 
connection refused" node="crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.511688 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.526679 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: W1007 00:11:14.558730 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-c1db47bdc6d793addc3146640efd66c54ec07675c5932967a1fba913722cb441 WatchSource:0}: Error finding container c1db47bdc6d793addc3146640efd66c54ec07675c5932967a1fba913722cb441: Status 404 returned error can't find the container with id c1db47bdc6d793addc3146640efd66c54ec07675c5932967a1fba913722cb441 Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.559459 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: W1007 00:11:14.564819 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-30d7c25e70cb2318813b160159b4792efeb20fe6385dc7783daa978a4dff47ed WatchSource:0}: Error finding container 30d7c25e70cb2318813b160159b4792efeb20fe6385dc7783daa978a4dff47ed: Status 404 returned error can't find the container with id 30d7c25e70cb2318813b160159b4792efeb20fe6385dc7783daa978a4dff47ed Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.572113 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: W1007 00:11:14.585505 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-28cce0b9953e0c22e7bfe905f0a42aebb9724d7eb35af427c9ac5b83e50a9a78 WatchSource:0}: Error finding container 28cce0b9953e0c22e7bfe905f0a42aebb9724d7eb35af427c9ac5b83e50a9a78: Status 404 returned error can't find the container with id 28cce0b9953e0c22e7bfe905f0a42aebb9724d7eb35af427c9ac5b83e50a9a78 Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.594049 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:14 crc kubenswrapper[4791]: W1007 00:11:14.594353 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-5d131703388be9ea042efea9955650b09dfd3b149a54371d6b768eee0f07b240 WatchSource:0}: Error finding container 5d131703388be9ea042efea9955650b09dfd3b149a54371d6b768eee0f07b240: Status 404 returned error can't find the container with id 5d131703388be9ea042efea9955650b09dfd3b149a54371d6b768eee0f07b240 Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.595940 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="800ms" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.816879 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.818634 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.818693 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.818705 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.818737 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.819234 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: connection refused" node="crc" Oct 07 00:11:14 crc kubenswrapper[4791]: W1007 00:11:14.828214 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:14 crc kubenswrapper[4791]: E1007 00:11:14.828315 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:14 crc kubenswrapper[4791]: I1007 00:11:14.984501 4791 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.075012 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c1db47bdc6d793addc3146640efd66c54ec07675c5932967a1fba913722cb441"} Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.075925 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"30d7c25e70cb2318813b160159b4792efeb20fe6385dc7783daa978a4dff47ed"} Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.077073 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ad8c222a7ba2974d1f38f7d38dec1a02c199d089272a36c168fb0fa06998f0c5"} Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.077953 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5d131703388be9ea042efea9955650b09dfd3b149a54371d6b768eee0f07b240"} Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.078905 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"28cce0b9953e0c22e7bfe905f0a42aebb9724d7eb35af427c9ac5b83e50a9a78"} Oct 07 00:11:15 crc kubenswrapper[4791]: W1007 00:11:15.204924 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:15 crc kubenswrapper[4791]: E1007 00:11:15.205015 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:15 crc kubenswrapper[4791]: W1007 00:11:15.332661 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:15 crc kubenswrapper[4791]: E1007 00:11:15.332848 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:15 crc kubenswrapper[4791]: E1007 00:11:15.397314 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="1.6s" Oct 07 00:11:15 crc kubenswrapper[4791]: W1007 00:11:15.441900 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:15 crc kubenswrapper[4791]: E1007 00:11:15.442210 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get 
\"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.619379 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.620814 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.620922 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.620939 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.621019 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 00:11:15 crc kubenswrapper[4791]: E1007 00:11:15.621893 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: connection refused" node="crc" Oct 07 00:11:15 crc kubenswrapper[4791]: I1007 00:11:15.984283 4791 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.086908 4791 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="35eefb2f2b9c1466ad164f2c0411a70842d911da3288ee87a6fd3938a2f22d89" exitCode=0 Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.087029 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"35eefb2f2b9c1466ad164f2c0411a70842d911da3288ee87a6fd3938a2f22d89"} Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.087136 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.089133 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.089171 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.089183 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.091551 4791 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554" exitCode=0 Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.091615 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.091704 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554"} Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.092419 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.092440 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.092448 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.098742 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42"} Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.098787 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b"} Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.098809 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141"} Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.098820 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310"} Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.098906 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.100573 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.100608 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.100623 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.104286 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa" exitCode=0 Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.104355 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa"} Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.104501 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.106350 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.106375 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.106386 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.108375 4791 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113" exitCode=0 Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.108436 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113"} Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.108531 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.109209 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.109229 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.109238 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.113145 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.113919 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.113942 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.113953 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:16 crc kubenswrapper[4791]: I1007 00:11:16.984567 4791 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:16 crc kubenswrapper[4791]: E1007 00:11:16.998829 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="3.2s" Oct 07 00:11:17 crc kubenswrapper[4791]: W1007 00:11:17.023838 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Oct 07 00:11:17 crc kubenswrapper[4791]: E1007 00:11:17.024003 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get 
\"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.114147 4791 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d" exitCode=0 Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.114304 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.114280 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.115537 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.115582 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.115594 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.118088 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.118299 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f2c2fcd66158ca91ced945b9a4d8ed2aa95ed2db56224f862e5b35d4d0e5230a"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.121371 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.121426 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.121443 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.125809 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.125792 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.125976 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.126012 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.127673 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.127707 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.127720 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.130201 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.130259 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.130261 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.130904 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.130936 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b"} Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.130269 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.131220 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.132439 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.132456 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.133141 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.133179 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.133191 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.222501 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.224128 4791 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.224167 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.224177 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.224202 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 00:11:17 crc kubenswrapper[4791]: E1007 00:11:17.224824 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: connection refused" node="crc" Oct 07 00:11:17 crc kubenswrapper[4791]: I1007 00:11:17.658491 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.135987 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966"} Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.136048 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.136091 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.137328 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.137362 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.137374 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.138655 4791 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264" exitCode=0 Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.138852 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.138994 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264"} Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.139083 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.139093 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.140013 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.140077 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:18 
crc kubenswrapper[4791]: I1007 00:11:18.140090 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.140014 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.140179 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.140204 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.140221 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.140242 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.140254 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.917850 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.918095 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.919369 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.919431 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:18 crc kubenswrapper[4791]: I1007 00:11:18.919442 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.106963 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.146580 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.146594 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd"} Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.146638 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560"} Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.146653 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435"} Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.146664 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff"} Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.146754 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.146835 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.148052 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.148108 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.148127 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.148353 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.148384 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:19 crc kubenswrapper[4791]: I1007 00:11:19.148416 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.153863 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a"} Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.153918 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.153968 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.154101 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.156204 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.156276 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.156305 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.158726 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.158796 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.158811 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.425784 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.427338 4791 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.427397 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.427444 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.427474 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 00:11:20 crc kubenswrapper[4791]: I1007 00:11:20.699121 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.156867 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.156867 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.157835 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.157888 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.157900 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.158062 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.158092 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.158101 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.880045 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.918880 4791 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 00:11:21 crc kubenswrapper[4791]: I1007 00:11:21.919011 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 00:11:22 crc kubenswrapper[4791]: I1007 00:11:22.159100 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:22 crc kubenswrapper[4791]: I1007 00:11:22.160618 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:22 crc kubenswrapper[4791]: I1007 
00:11:22.160647 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:22 crc kubenswrapper[4791]: I1007 00:11:22.160656 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:24 crc kubenswrapper[4791]: E1007 00:11:24.117123 4791 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.445692 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.446025 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.447930 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.447983 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.447995 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.486927 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.487290 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.489126 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.489177 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:24 crc kubenswrapper[4791]: I1007 00:11:24.489193 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:25 crc kubenswrapper[4791]: I1007 00:11:25.745839 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:25 crc kubenswrapper[4791]: I1007 00:11:25.746130 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:25 crc kubenswrapper[4791]: I1007 00:11:25.748052 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:25 crc kubenswrapper[4791]: I1007 00:11:25.748161 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:25 crc kubenswrapper[4791]: I1007 00:11:25.748190 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:25 crc kubenswrapper[4791]: I1007 00:11:25.906930 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:25 crc kubenswrapper[4791]: I1007 00:11:25.914693 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:26 crc kubenswrapper[4791]: I1007 
00:11:26.170505 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:26 crc kubenswrapper[4791]: I1007 00:11:26.171600 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:26 crc kubenswrapper[4791]: I1007 00:11:26.171672 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:26 crc kubenswrapper[4791]: I1007 00:11:26.171692 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:26 crc kubenswrapper[4791]: I1007 00:11:26.174831 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.172192 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.175834 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.175901 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.175919 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.738055 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.738452 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.740071 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.740152 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.740181 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:27 crc kubenswrapper[4791]: W1007 00:11:27.741937 4791 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.742115 4791 trace.go:236] Trace[67517790]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 00:11:17.740) (total time: 10001ms): Oct 07 00:11:27 crc kubenswrapper[4791]: Trace[67517790]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (00:11:27.741) Oct 07 00:11:27 crc kubenswrapper[4791]: Trace[67517790]: [10.001134961s] [10.001134961s] END Oct 07 00:11:27 crc kubenswrapper[4791]: E1007 00:11:27.742153 4791 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get 
\"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.999187 4791 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 07 00:11:27 crc kubenswrapper[4791]: I1007 00:11:27.999257 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.003768 4791 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.003874 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.176142 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.177902 4791 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966" exitCode=255 Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.178033 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.178020 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966"} Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.178329 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.178829 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.178869 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.178881 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" 
Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.179535 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.179561 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.179571 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:28 crc kubenswrapper[4791]: I1007 00:11:28.180100 4791 scope.go:117] "RemoveContainer" containerID="0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966" Oct 07 00:11:29 crc kubenswrapper[4791]: I1007 00:11:29.184002 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 07 00:11:29 crc kubenswrapper[4791]: I1007 00:11:29.188270 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0"} Oct 07 00:11:29 crc kubenswrapper[4791]: I1007 00:11:29.188467 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:29 crc kubenswrapper[4791]: I1007 00:11:29.189522 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:29 crc kubenswrapper[4791]: I1007 00:11:29.189563 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:29 crc kubenswrapper[4791]: I1007 00:11:29.189578 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:30 crc kubenswrapper[4791]: I1007 00:11:30.699813 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:30 crc kubenswrapper[4791]: I1007 00:11:30.700156 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:30 crc kubenswrapper[4791]: I1007 00:11:30.702266 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:30 crc kubenswrapper[4791]: I1007 00:11:30.702351 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:30 crc kubenswrapper[4791]: I1007 00:11:30.702376 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 00:11:31.891977 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 00:11:31.892171 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 00:11:31.893799 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 00:11:31.893887 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 
00:11:31.893906 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 00:11:31.900215 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 00:11:31.919278 4791 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 00:11:31.919527 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 00:11:31 crc kubenswrapper[4791]: I1007 00:11:31.985677 4791 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 07 00:11:32 crc kubenswrapper[4791]: E1007 00:11:32.979405 4791 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.980907 4791 apiserver.go:52] "Watching apiserver" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.981697 4791 trace.go:236] Trace[2081610892]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 00:11:18.266) (total time: 14715ms): Oct 07 00:11:32 crc kubenswrapper[4791]: Trace[2081610892]: ---"Objects listed" error: 14715ms (00:11:32.981) Oct 07 00:11:32 crc kubenswrapper[4791]: Trace[2081610892]: [14.715196678s] [14.715196678s] END Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.981883 4791 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.982258 4791 trace.go:236] Trace[2086320946]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 00:11:18.405) (total time: 14576ms): Oct 07 00:11:32 crc kubenswrapper[4791]: Trace[2086320946]: ---"Objects listed" error: 14576ms (00:11:32.982) Oct 07 00:11:32 crc kubenswrapper[4791]: Trace[2086320946]: [14.576788273s] [14.576788273s] END Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.982389 4791 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.982421 4791 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.983951 4791 trace.go:236] Trace[1191256571]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 00:11:22.752) (total time: 10230ms): Oct 07 00:11:32 crc kubenswrapper[4791]: Trace[1191256571]: ---"Objects listed" error: 10230ms (00:11:32.983) Oct 07 00:11:32 crc kubenswrapper[4791]: Trace[1191256571]: [10.230953148s] [10.230953148s] END Oct 07 00:11:32 crc kubenswrapper[4791]: 
I1007 00:11:32.983996 4791 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.984491 4791 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 07 00:11:32 crc kubenswrapper[4791]: E1007 00:11:32.984711 4791 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.984974 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.985599 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.985791 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:32 crc kubenswrapper[4791]: E1007 00:11:32.986067 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.986263 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.986268 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.986589 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.986267 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:32 crc kubenswrapper[4791]: E1007 00:11:32.986647 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:32 crc kubenswrapper[4791]: E1007 00:11:32.986671 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.987816 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.987831 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.988897 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.988906 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.988963 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.989092 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.989124 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.989047 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.989968 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 07 00:11:32 crc kubenswrapper[4791]: I1007 00:11:32.990794 4791 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.023805 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.043163 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.057220 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.068548 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.078753 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083241 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083294 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083315 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083333 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083354 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083371 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083389 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083431 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083449 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083465 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083485 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083501 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083518 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083532 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083547 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083564 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083581 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083603 
4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083619 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083637 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083652 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083672 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083688 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083690 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083699 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083952 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083993 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.083707 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084199 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084239 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084272 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084306 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084333 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084361 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084389 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084435 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084469 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084498 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084527 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084555 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084581 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084613 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084634 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084660 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084661 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084689 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084718 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084736 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084727 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084918 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085043 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085126 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085194 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085299 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.084753 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085446 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085481 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085547 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085576 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085602 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085627 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085652 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085674 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085697 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085716 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085733 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085749 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085766 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085784 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085801 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085823 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085841 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085859 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" 
(UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085876 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085895 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085913 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085933 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085952 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085967 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.085988 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086006 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086025 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 
crc kubenswrapper[4791]: I1007 00:11:33.086045 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086069 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086091 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086116 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086140 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086174 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086187 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086244 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086283 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086288 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086311 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086323 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086344 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086413 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086438 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086449 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086468 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086494 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086517 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086541 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086569 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086627 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: 
"09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086633 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086682 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086719 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086758 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086796 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086824 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086852 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086874 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086897 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086920 4791 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086939 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086959 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086979 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.086999 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087017 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087037 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087057 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087067 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087081 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087103 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087127 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087135 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087152 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087176 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087178 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087202 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087227 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087253 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087276 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087299 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087319 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087344 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087365 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087384 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087466 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod 
\"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087472 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087493 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087512 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087525 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087652 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087780 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087817 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087838 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088225 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088248 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088268 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088293 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088319 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088342 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088369 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088396 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088446 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088478 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088501 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088527 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088606 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088631 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088658 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088683 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088720 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088743 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088769 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088810 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088837 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088913 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088939 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088967 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088990 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089012 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089070 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089094 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089118 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089141 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089168 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089198 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089223 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089247 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089277 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089302 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089324 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089348 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089374 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089423 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089451 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089506 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089530 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089553 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089579 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089602 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089629 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089652 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089677 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089695 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089714 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089734 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089754 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089771 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089789 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089809 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089827 4791 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089847 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089871 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089899 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089925 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089950 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089975 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089999 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090022 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090046 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090118 4791 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090144 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090171 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090196 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090218 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090244 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090268 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090329 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090360 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090389 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: 
\"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090518 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090582 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090644 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090676 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090703 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090732 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090761 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090786 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090863 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090894 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090921 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091535 4791 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091642 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091667 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091685 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091698 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091711 4791 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091723 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091734 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" 
DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091745 4791 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091755 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091767 4791 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091779 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091789 4791 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091803 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091814 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091826 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091837 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091848 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091858 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091869 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091879 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091890 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091900 4791 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091910 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091920 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087846 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087871 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.087873 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.088746 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089366 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089418 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089438 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089522 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.089979 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090295 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090508 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090541 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090564 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090854 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090881 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090920 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.090968 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091246 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091357 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.091630 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091683 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.091826 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.092002 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.092028 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.092187 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.092589 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.093183 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.093884 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:33.593847194 +0000 UTC m=+20.189785025 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.093971 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.094244 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.094394 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.094426 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.094795 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.094842 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.095102 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.095666 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.095839 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.095900 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.096186 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.096287 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:11:33.596242693 +0000 UTC m=+20.192180334 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.096374 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.096388 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.096473 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.096780 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.096947 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.097249 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.097315 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.097541 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.097571 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.097598 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.097755 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.097910 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.097926 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.098090 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.098140 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.098028 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.098218 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.098306 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.098502 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.098702 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.099227 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.099253 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.100347 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.100510 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.100650 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.100659 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.100678 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.100978 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.101241 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.101501 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.101739 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.102100 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.102346 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.102764 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.102852 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.103032 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.103624 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.103647 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.104532 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.104866 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105091 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105105 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105137 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105164 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105951 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.106268 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105398 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105624 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105713 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.105736 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.106567 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.106464 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.106798 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.106810 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.106825 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.106836 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.107098 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.107614 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.108286 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.115609 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.116129 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.116198 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.116382 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.116403 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.117289 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.118026 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.118455 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.119100 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.119461 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.119634 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.119849 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.120014 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.120182 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.120570 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.120924 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.121300 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.121615 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.121912 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.122885 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.122908 4791 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.123079 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.123605 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.123700 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.123767 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:33.623746624 +0000 UTC m=+20.219684475 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.124797 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.125164 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.125692 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.127841 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.127856 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.128084 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.128223 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.128778 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.128905 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.128942 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.129405 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.148927 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.149930 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.150031 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.151688 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.152050 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.152864 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.153267 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.159442 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.159560 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.159704 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-10-07 00:11:33.659670298 +0000 UTC m=+20.255607949 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.154623 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.160614 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.163508 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.166171 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.166202 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.166218 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.166275 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:33.666256137 +0000 UTC m=+20.262193978 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.171588 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.175199 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.175479 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.178225 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.178532 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.178912 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.178963 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.179183 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.179199 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.179568 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.179567 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.179580 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.179702 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.179886 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.180260 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.180425 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.182344 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.183687 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.184659 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.185643 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.185796 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.185941 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.186628 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.192319 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.192793 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193005 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193035 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193033 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193199 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:33 crc kubenswrapper[4791]: W1007 00:11:33.193222 4791 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~secret/image-registry-operator-tls Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193195 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193275 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193235 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193242 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193446 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193483 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193500 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193515 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193517 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193525 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193564 4791 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193588 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193596 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193604 4791 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193638 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193650 4791 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193660 4791 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193669 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193678 4791 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193687 4791 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193722 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193732 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193740 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193748 4791 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193756 4791 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193764 4791 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193773 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193784 4791 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193792 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193810 4791 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193819 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193827 4791 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193836 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193844 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193852 4791 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193862 4791 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193870 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193850 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193879 4791 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193946 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193958 4791 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193970 4791 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193982 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.193992 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194004 4791 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194015 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194025 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194035 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194046 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194056 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194066 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194076 4791 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194089 4791 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194099 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194485 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194497 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194544 4791 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194556 4791 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194553 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194567 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194600 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194611 4791 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194623 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194630 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194722 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194633 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194676 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194765 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194777 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194788 4791 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194797 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194808 4791 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194820 4791 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194833 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194848 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194858 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194867 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194876 4791 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194886 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194894 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" 
DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194902 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194817 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194912 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.194982 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195000 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195013 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195025 4791 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195038 4791 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195050 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195059 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195069 4791 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195081 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195094 4791 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195107 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195119 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195132 4791 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195135 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195145 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195177 4791 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195188 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195210 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195222 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195233 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195242 4791 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195252 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 
00:11:33.195262 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195272 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195281 4791 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195305 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195315 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195325 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195335 4791 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195370 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195380 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195389 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195420 4791 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195432 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195460 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: 
I1007 00:11:33.195485 4791 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195496 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195505 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195515 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195524 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195533 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195542 4791 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195565 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195574 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195583 4791 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195592 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195601 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195611 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195619 4791 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195638 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195647 4791 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195657 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195666 4791 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195675 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195683 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195692 4791 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195701 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195708 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195717 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195726 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195743 4791 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195751 4791 reconciler_common.go:293] 
"Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195760 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195770 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195778 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195807 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195817 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195826 4791 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195834 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195846 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195854 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195865 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195881 4791 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195889 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195902 4791 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195910 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195918 4791 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195926 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195936 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195945 4791 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195953 4791 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195961 4791 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195974 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195983 4791 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.195998 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.196006 4791 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.196014 4791 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.196026 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" 
(UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.211786 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.212169 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.219196 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.296748 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297107 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297174 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297245 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297312 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297365 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297453 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: 
I1007 00:11:33.297520 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297583 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297640 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297695 4791 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297749 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297806 4791 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.297856 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.305638 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.314904 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.327710 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 00:11:33 crc kubenswrapper[4791]: W1007 00:11:33.344190 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-93b7986be947f5c2db7508da84741cbd3d22aad2d40cf665eeddbb64bba0543e WatchSource:0}: Error finding container 93b7986be947f5c2db7508da84741cbd3d22aad2d40cf665eeddbb64bba0543e: Status 404 returned error can't find the container with id 93b7986be947f5c2db7508da84741cbd3d22aad2d40cf665eeddbb64bba0543e Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.601019 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.601219 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:11:34.601179538 +0000 UTC m=+21.197117189 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.601310 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.601520 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.601628 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:34.60160754 +0000 UTC m=+21.197545181 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.702336 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.702420 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:33 crc kubenswrapper[4791]: I1007 00:11:33.702468 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702609 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702652 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702711 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:34.702682678 +0000 UTC m=+21.298620349 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702712 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702767 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702669 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702800 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702816 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702819 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:34.702806231 +0000 UTC m=+21.298743902 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:33 crc kubenswrapper[4791]: E1007 00:11:33.702853 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:34.702841572 +0000 UTC m=+21.298779223 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.072448 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.073431 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.074999 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.075927 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.077310 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.078056 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.078882 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.080433 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.081292 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.082923 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.083716 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.085259 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.085998 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.086721 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.088087 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.089121 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.091103 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.091787 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.092682 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.093020 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.094703 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.095390 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.096155 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.097321 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.098169 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.099281 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.100161 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.101563 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.102151 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.103479 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.104122 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.104705 4791 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.104839 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.107226 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.107930 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.107983 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.108936 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.111581 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.112592 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.113962 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.114908 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.116787 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.117594 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.119063 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.120214 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.121700 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.122449 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.123883 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.125384 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.126308 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.126845 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.128112 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.128986 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.129682 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.131192 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.132081 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.133696 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.142923 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.165457 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.181380 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.203972 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.206278 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b"} Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.206328 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc"} Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.206342 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"93b7986be947f5c2db7508da84741cbd3d22aad2d40cf665eeddbb64bba0543e"} Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.208073 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c176934f763db56c1f296817515383f9013ae23c4c314c947c44cc30987bb5f2"} Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.209451 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807"} Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.209487 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"506d9736a1948489b7d34edb083a31f1d1416806e595cc140788568e06d17146"} Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.269767 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.289585 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.315380 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.331284 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.347530 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.365767 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.380638 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.398822 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.412166 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.429172 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.443984 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.461798 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.478312 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.490980 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.609150 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.609309 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.609345 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:11:36.609320578 +0000 UTC m=+23.205258229 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.609479 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.609543 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:36.609531664 +0000 UTC m=+23.205469335 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.675072 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-mgwcn"] Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.675435 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mgwcn" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.678194 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.679195 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.691455 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.696524 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-2lpln"] Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.696913 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.699225 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.699484 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.699525 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.700432 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.709790 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a1a690cd-8485-4ab6-aaca-f11c056810c0-serviceca\") pod \"node-ca-2lpln\" (UID: \"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.709852 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.709927 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl2wr\" (UniqueName: \"kubernetes.io/projected/7af405e5-f2fb-4e2e-a452-25e96e1abe40-kube-api-access-gl2wr\") pod \"node-resolver-mgwcn\" (UID: \"7af405e5-f2fb-4e2e-a452-25e96e1abe40\") " pod="openshift-dns/node-resolver-mgwcn" Oct 07 00:11:34 crc 
kubenswrapper[4791]: E1007 00:11:34.709995 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.710003 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7af405e5-f2fb-4e2e-a452-25e96e1abe40-hosts-file\") pod \"node-resolver-mgwcn\" (UID: \"7af405e5-f2fb-4e2e-a452-25e96e1abe40\") " pod="openshift-dns/node-resolver-mgwcn" Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710017 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710030 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.710033 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9zcs\" (UniqueName: \"kubernetes.io/projected/a1a690cd-8485-4ab6-aaca-f11c056810c0-kube-api-access-q9zcs\") pod \"node-ca-2lpln\" (UID: \"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.710065 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710081 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:36.710063516 +0000 UTC m=+23.306001167 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.710160 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710227 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.710231 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1a690cd-8485-4ab6-aaca-f11c056810c0-host\") pod \"node-ca-2lpln\" (UID: \"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710259 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710294 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:36.710276732 +0000 UTC m=+23.306214383 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710300 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710324 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:34 crc kubenswrapper[4791]: E1007 00:11:34.710392 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:36.710369994 +0000 UTC m=+23.306307645 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.718988 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.743018 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.772369 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.803737 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.811447 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a1a690cd-8485-4ab6-aaca-f11c056810c0-serviceca\") pod \"node-ca-2lpln\" (UID: \"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.811718 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl2wr\" (UniqueName: \"kubernetes.io/projected/7af405e5-f2fb-4e2e-a452-25e96e1abe40-kube-api-access-gl2wr\") pod \"node-resolver-mgwcn\" (UID: \"7af405e5-f2fb-4e2e-a452-25e96e1abe40\") " pod="openshift-dns/node-resolver-mgwcn" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.811846 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7af405e5-f2fb-4e2e-a452-25e96e1abe40-hosts-file\") pod \"node-resolver-mgwcn\" (UID: \"7af405e5-f2fb-4e2e-a452-25e96e1abe40\") " pod="openshift-dns/node-resolver-mgwcn" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.811923 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9zcs\" (UniqueName: \"kubernetes.io/projected/a1a690cd-8485-4ab6-aaca-f11c056810c0-kube-api-access-q9zcs\") pod \"node-ca-2lpln\" (UID: \"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.811986 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1a690cd-8485-4ab6-aaca-f11c056810c0-host\") pod \"node-ca-2lpln\" (UID: \"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.812060 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1a690cd-8485-4ab6-aaca-f11c056810c0-host\") pod \"node-ca-2lpln\" (UID: 
\"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.812012 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7af405e5-f2fb-4e2e-a452-25e96e1abe40-hosts-file\") pod \"node-resolver-mgwcn\" (UID: \"7af405e5-f2fb-4e2e-a452-25e96e1abe40\") " pod="openshift-dns/node-resolver-mgwcn" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.814924 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a1a690cd-8485-4ab6-aaca-f11c056810c0-serviceca\") pod \"node-ca-2lpln\" (UID: \"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.833487 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9zcs\" (UniqueName: \"kubernetes.io/projected/a1a690cd-8485-4ab6-aaca-f11c056810c0-kube-api-access-q9zcs\") pod \"node-ca-2lpln\" (UID: \"a1a690cd-8485-4ab6-aaca-f11c056810c0\") " pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.835057 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl2wr\" (UniqueName: \"kubernetes.io/projected/7af405e5-f2fb-4e2e-a452-25e96e1abe40-kube-api-access-gl2wr\") pod \"node-resolver-mgwcn\" (UID: \"7af405e5-f2fb-4e2e-a452-25e96e1abe40\") " pod="openshift-dns/node-resolver-mgwcn" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.843999 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.881725 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.912642 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.969144 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:34 crc kubenswrapper[4791]: I1007 00:11:34.988151 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mgwcn" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.002622 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.009048 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-2lpln" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.034578 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.058141 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.067946 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.067985 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:35 crc kubenswrapper[4791]: E1007 00:11:35.068105 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.068153 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:35 crc kubenswrapper[4791]: E1007 00:11:35.068206 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:35 crc kubenswrapper[4791]: E1007 00:11:35.068256 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.074254 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.096780 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.117872 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.129957 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.142838 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.156907 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: W1007 00:11:35.167276 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1a690cd_8485_4ab6_aaca_f11c056810c0.slice/crio-bea965dd573e4e341960cbef13f0fa3a1270115743fb50891635a611149acc12 WatchSource:0}: Error finding container bea965dd573e4e341960cbef13f0fa3a1270115743fb50891635a611149acc12: Status 404 returned error can't find the container with id bea965dd573e4e341960cbef13f0fa3a1270115743fb50891635a611149acc12 Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.213341 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2lpln" event={"ID":"a1a690cd-8485-4ab6-aaca-f11c056810c0","Type":"ContainerStarted","Data":"bea965dd573e4e341960cbef13f0fa3a1270115743fb50891635a611149acc12"} Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.213980 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mgwcn" event={"ID":"7af405e5-f2fb-4e2e-a452-25e96e1abe40","Type":"ContainerStarted","Data":"7978da6391d936e1e459877afe4bd742ac45bdbced6e667f48ae5c2653a01633"} Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.626498 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-h728c"] Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.626917 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-xbjfx"] Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.627124 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.627860 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.629625 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.630588 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.631169 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.631296 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.631981 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-q4xzr"] Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.632605 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-n6cgf"] Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.632813 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.633459 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.635619 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.635852 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.635988 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.636192 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 07 00:11:35 crc kubenswrapper[4791]: W1007 00:11:35.636392 4791 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": failed to list *v1.Secret: secrets "ovn-kubernetes-node-dockercfg-pwtwl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Oct 07 00:11:35 crc kubenswrapper[4791]: E1007 00:11:35.636467 4791 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-kubernetes-node-dockercfg-pwtwl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-kubernetes-node-dockercfg-pwtwl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.641801 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.642158 4791 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 07 00:11:35 crc kubenswrapper[4791]: W1007 00:11:35.642378 4791 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": failed to list *v1.Secret: secrets "ovn-node-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Oct 07 00:11:35 crc kubenswrapper[4791]: E1007 00:11:35.642437 4791 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-node-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-node-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.642618 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.643691 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.644133 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.645613 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.645885 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.648345 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.649980 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.662689 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.677315 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.689951 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.701623 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.714456 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720645 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-ovn\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720698 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-rootfs\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720724 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8a389028-af4a-4b2c-a638-04eac9238628-cni-binary-copy\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720763 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-kubelet\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720790 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-os-release\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720814 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-w4b4d\" (UniqueName: \"kubernetes.io/projected/8a389028-af4a-4b2c-a638-04eac9238628-kube-api-access-w4b4d\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720836 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-node-log\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720859 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-socket-dir-parent\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720888 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-netns\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720910 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-slash\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720942 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.720970 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-cni-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721100 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8a389028-af4a-4b2c-a638-04eac9238628-multus-daemon-config\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721227 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-systemd-units\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721260 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-mcd-auth-proxy-config\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721310 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-ovn-kubernetes\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721348 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-netd\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721385 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-system-cni-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721449 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-kubelet\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721494 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-var-lib-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721525 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721552 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cni-binary-copy\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721613 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-etc-kubernetes\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " 
pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721683 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-systemd\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721720 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-cnibin\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721815 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-hostroot\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721893 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-etc-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721935 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-script-lib\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.721972 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lg8r\" (UniqueName: \"kubernetes.io/projected/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-kube-api-access-8lg8r\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722008 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cnibin\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722052 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-k8s-cni-cncf-io\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722079 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-log-socket\") pod \"ovnkube-node-n6cgf\" (UID: 
\"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722097 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47547f34-4a66-4d60-8d38-af69eb320b1d-ovn-node-metrics-cert\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722169 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-netns\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722265 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bz9s\" (UniqueName: \"kubernetes.io/projected/caaf5441-9d24-4a73-9c10-a28c7278c2f3-kube-api-access-9bz9s\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722365 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-system-cni-dir\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722395 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-conf-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722470 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-config\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722495 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722558 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-cni-bin\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722578 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-grsfn\" (UniqueName: \"kubernetes.io/projected/47547f34-4a66-4d60-8d38-af69eb320b1d-kube-api-access-grsfn\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722596 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-proxy-tls\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722653 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-bin\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722674 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-env-overrides\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722734 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-os-release\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722780 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722812 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-cni-multus\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.722848 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-multus-certs\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.734833 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.751529 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.764589 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.780257 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.794007 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.807839 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.819936 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823292 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-kubelet\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823341 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-os-release\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823367 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4b4d\" (UniqueName: \"kubernetes.io/projected/8a389028-af4a-4b2c-a638-04eac9238628-kube-api-access-w4b4d\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823390 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-node-log\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823437 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-socket-dir-parent\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823433 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-kubelet\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823467 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-netns\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823492 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-slash\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823516 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823544 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-cni-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823597 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8a389028-af4a-4b2c-a638-04eac9238628-multus-daemon-config\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823605 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-socket-dir-parent\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823636 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-slash\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823636 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-node-log\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823591 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823731 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-os-release\") pod \"multus-xbjfx\" (UID: 
\"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823822 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-cni-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.823869 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-netns\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824249 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-systemd-units\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824287 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-mcd-auth-proxy-config\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824316 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-ovn-kubernetes\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824338 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-netd\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824349 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-systemd-units\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824362 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-system-cni-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824352 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8a389028-af4a-4b2c-a638-04eac9238628-multus-daemon-config\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824389 
4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-netd\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824390 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-kubelet\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824449 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-kubelet\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824503 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-var-lib-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824538 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824567 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cni-binary-copy\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824542 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-var-lib-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824594 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-etc-kubernetes\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824620 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-etc-kubernetes\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824443 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-system-cni-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824636 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-systemd\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824660 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-systemd\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824673 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824684 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-cnibin\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824710 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-etc-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824729 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-script-lib\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824751 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lg8r\" (UniqueName: \"kubernetes.io/projected/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-kube-api-access-8lg8r\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824772 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cnibin\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824781 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-cnibin\") pod \"multus-xbjfx\" (UID: 
\"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824791 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-k8s-cni-cncf-io\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824813 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-hostroot\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824823 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cnibin\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824835 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47547f34-4a66-4d60-8d38-af69eb320b1d-ovn-node-metrics-cert\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824794 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-etc-openvswitch\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824871 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-netns\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824874 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-k8s-cni-cncf-io\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824894 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-log-socket\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824917 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-hostroot\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824941 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-9bz9s\" (UniqueName: \"kubernetes.io/projected/caaf5441-9d24-4a73-9c10-a28c7278c2f3-kube-api-access-9bz9s\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824984 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-system-cni-dir\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.824943 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-log-socket\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825017 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-conf-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825062 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-system-cni-dir\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825080 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-multus-conf-dir\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825065 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-config\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825096 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-netns\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825140 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825230 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-cni-bin\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825259 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-proxy-tls\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825282 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-bin\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825299 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-env-overrides\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825315 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grsfn\" (UniqueName: \"kubernetes.io/projected/47547f34-4a66-4d60-8d38-af69eb320b1d-kube-api-access-grsfn\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825318 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-cni-bin\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825331 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-os-release\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825322 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cni-binary-copy\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825352 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825375 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-cni-multus\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825391 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-multus-certs\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825328 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-mcd-auth-proxy-config\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825434 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-bin\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825434 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-ovn\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825458 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-ovn\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825516 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-rootfs\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825503 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-var-lib-cni-multus\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825548 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8a389028-af4a-4b2c-a638-04eac9238628-cni-binary-copy\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825563 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-rootfs\") pod \"machine-config-daemon-h728c\" (UID: 
\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825578 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-os-release\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825565 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8a389028-af4a-4b2c-a638-04eac9238628-host-run-multus-certs\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825692 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/caaf5441-9d24-4a73-9c10-a28c7278c2f3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825925 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-env-overrides\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.825973 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-script-lib\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.826141 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/caaf5441-9d24-4a73-9c10-a28c7278c2f3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.826196 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-config\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.826255 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-ovn-kubernetes\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.826363 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8a389028-af4a-4b2c-a638-04eac9238628-cni-binary-copy\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " 
pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.830169 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-proxy-tls\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.842684 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4b4d\" (UniqueName: \"kubernetes.io/projected/8a389028-af4a-4b2c-a638-04eac9238628-kube-api-access-w4b4d\") pod \"multus-xbjfx\" (UID: \"8a389028-af4a-4b2c-a638-04eac9238628\") " pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.842968 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grsfn\" (UniqueName: \"kubernetes.io/projected/47547f34-4a66-4d60-8d38-af69eb320b1d-kube-api-access-grsfn\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.844182 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bz9s\" (UniqueName: \"kubernetes.io/projected/caaf5441-9d24-4a73-9c10-a28c7278c2f3-kube-api-access-9bz9s\") pod \"multus-additional-cni-plugins-q4xzr\" (UID: \"caaf5441-9d24-4a73-9c10-a28c7278c2f3\") " pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.844878 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lg8r\" (UniqueName: \"kubernetes.io/projected/ae232b81-12ca-4baa-ad86-96f3fbd32ac9-kube-api-access-8lg8r\") pod \"machine-config-daemon-h728c\" (UID: \"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\") " pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.845760 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.859291 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.871251 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.884779 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.896488 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.908767 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.923878 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.939313 4791 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\
\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.952835 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-xbjfx" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.954385 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.961320 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.968601 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.972854 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:35 crc kubenswrapper[4791]: I1007 00:11:35.987138 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:35Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.003091 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.226447 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2lpln" 
event={"ID":"a1a690cd-8485-4ab6-aaca-f11c056810c0","Type":"ContainerStarted","Data":"b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590"} Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.228097 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mgwcn" event={"ID":"7af405e5-f2fb-4e2e-a452-25e96e1abe40","Type":"ContainerStarted","Data":"2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403"} Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.230817 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xbjfx" event={"ID":"8a389028-af4a-4b2c-a638-04eac9238628","Type":"ContainerStarted","Data":"faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053"} Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.230866 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xbjfx" event={"ID":"8a389028-af4a-4b2c-a638-04eac9238628","Type":"ContainerStarted","Data":"02aeb2d18b58ca6903689ddd8c7359578d38b768cae0a5af3f9080e711702ce0"} Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.231877 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerStarted","Data":"785b39a7cd7fca86a03ad769a8218aa3a489f216ed94f5e0a42cd6ebd2da8243"} Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.234549 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7"} Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.234594 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b"} Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.234613 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"93fdbf0448588a51d597dee1897ce7dff89e3043c72f90b232882493a0e4684a"} Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.241194 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.259263 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.277115 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.293715 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.307891 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.321914 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.336611 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.353764 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.376430 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.392317 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.404595 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.417637 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.431801 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.458816 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.472517 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.488240 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.507016 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.520846 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.536292 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.547483 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.549908 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.561088 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.575759 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.590222 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.605986 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\
"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.618979 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9
zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.634326 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.634731 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:11:40.634694228 +0000 UTC m=+27.230631909 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.634718 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:36Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.634831 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.635022 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.635082 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:40.635072829 +0000 UTC m=+27.231010480 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.736201 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.736272 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.736316 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736450 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736557 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736582 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:40.736556648 +0000 UTC m=+27.332494299 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736594 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736613 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736538 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736662 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:40.736644631 +0000 UTC m=+27.332582292 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736680 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736704 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:36 crc kubenswrapper[4791]: E1007 00:11:36.736780 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:40.736754624 +0000 UTC m=+27.332692465 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.809738 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.820364 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47547f34-4a66-4d60-8d38-af69eb320b1d-ovn-node-metrics-cert\") pod \"ovnkube-node-n6cgf\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:36 crc kubenswrapper[4791]: I1007 00:11:36.874638 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:36 crc kubenswrapper[4791]: W1007 00:11:36.890716 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47547f34_4a66_4d60_8d38_af69eb320b1d.slice/crio-9735fa07b69c4bac03dad134cd5beee003b25a4f931f7edd798137c289e11519 WatchSource:0}: Error finding container 9735fa07b69c4bac03dad134cd5beee003b25a4f931f7edd798137c289e11519: Status 404 returned error can't find the container with id 9735fa07b69c4bac03dad134cd5beee003b25a4f931f7edd798137c289e11519 Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.068868 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.068960 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:37 crc kubenswrapper[4791]: E1007 00:11:37.069009 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.068967 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:37 crc kubenswrapper[4791]: E1007 00:11:37.069158 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:37 crc kubenswrapper[4791]: E1007 00:11:37.069300 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.239486 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e" exitCode=0 Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.239526 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.240841 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"9735fa07b69c4bac03dad134cd5beee003b25a4f931f7edd798137c289e11519"} Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.242667 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3"} Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.244443 4791 generic.go:334] "Generic (PLEG): container finished" podID="caaf5441-9d24-4a73-9c10-a28c7278c2f3" containerID="dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377" exitCode=0 Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.244534 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerDied","Data":"dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377"} Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.267928 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.282793 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.296829 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.312758 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.330016 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.347851 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.364134 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\
"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.378037 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.388149 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.403623 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.420975 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.436570 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.464931 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z 
is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.483852 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.496610 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.511652 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.525218 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.537354 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.565103 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.581679 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webho
ok\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.595104 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.614317 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.630248 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.649334 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.666817 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.684746 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc 
kubenswrapper[4791]: I1007 00:11:37.777300 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.793423 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.794804 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.796813 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc 
kubenswrapper[4791]: I1007 00:11:37.810445 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.824864 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.843272 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.856517 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.871669 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.895161 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z 
is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.911655 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.926755 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.942153 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.956042 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.969021 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:37 crc kubenswrapper[4791]: I1007 00:11:37.985490 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-10-07T00:11:37Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.009269 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.026392 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.041439 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.055471 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.076158 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed 
certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}
}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.100135 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.139967 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.180118 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.229730 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z 
is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.253152 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.253199 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.253211 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.253223 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.255198 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerStarted","Data":"ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822"} Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.268847 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4
a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.302286 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.338919 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.378798 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.420669 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.457661 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.499732 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.539661 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.594739 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.622046 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.667376 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.707287 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68774
41ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.740126 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.780050 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.823033 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.859531 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.901726 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.922572 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.925765 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.939668 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:38 crc kubenswrapper[4791]: I1007 00:11:38.957693 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.000346 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:38Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.044740 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.068951 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.069002 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.068966 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.069096 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.069171 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.069238 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.079880 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.121456 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.157505 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.198619 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.242934 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.261029 4791 generic.go:334] "Generic (PLEG): container finished" podID="caaf5441-9d24-4a73-9c10-a28c7278c2f3" containerID="ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822" exitCode=0 Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.261118 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerDied","Data":"ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.265794 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.265913 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.281929 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.320546 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.360418 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.385760 4791 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.387626 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.387660 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.387671 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.387796 4791 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.399067 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.450546 4791 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.450820 4791 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.451913 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.451959 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.451972 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.451993 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.452010 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.463716 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.467311 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.467349 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.467360 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.467375 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.467386 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.476951 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.481681 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.485312 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.485350 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.485362 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.485427 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.485442 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.497301 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.500657 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.500706 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.500718 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.500738 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.500750 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.513835 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.518200 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.518251 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.518263 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.518281 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.518295 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.522993 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.530774 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: E1007 00:11:39.530918 4791 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.532886 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.532929 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.532942 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.532960 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.532973 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.559503 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.598102 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.636106 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.636158 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.636171 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.636192 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.636203 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.642034 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c193799
79168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.679806 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.718633 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.738682 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.738718 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.738727 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.738743 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.738753 4791 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.759633 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.806623 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.840752 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.840798 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.840809 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.840826 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.840838 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.843500 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.880787 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.918643 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.942808 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.942843 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.942853 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.942893 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.942904 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:39Z","lastTransitionTime":"2025-10-07T00:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:39 crc kubenswrapper[4791]: I1007 00:11:39.958231 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.000704 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:39Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.044935 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa
7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.045193 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.045219 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.045230 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.045244 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.045616 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.080862 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.122598 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.148104 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.148174 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.148195 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.148226 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.148245 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.162236 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.201083 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.238997 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.251367 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.251449 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.251464 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.251488 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.251503 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.274508 4791 generic.go:334] "Generic (PLEG): container finished" podID="caaf5441-9d24-4a73-9c10-a28c7278c2f3" containerID="30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04" exitCode=0 Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.274561 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerDied","Data":"30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.287756 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 
00:11:40.322880 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.353715 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.353754 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.353767 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.353783 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.353793 4791 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.359760 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.399773 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.438840 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.455968 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.456028 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.456043 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.456065 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.456077 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.484127 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z 
is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.541858 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.561060 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.561106 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.561116 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.561139 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.561151 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.571939 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.600544 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.638931 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.664292 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.664381 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.664414 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.664436 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.664450 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.675364 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.675521 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.675610 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:11:48.675556515 +0000 UTC m=+35.271494196 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.675695 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.675760 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:48.67574584 +0000 UTC m=+35.271683511 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.680072 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.705629 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.726749 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.767320 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.767372 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.767385 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.767417 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.767429 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.772533 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.777451 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.777542 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.777623 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777741 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777757 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777778 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777804 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777831 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:48.777812627 +0000 UTC m=+35.373750448 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777838 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777861 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777877 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777886 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:48.777854938 +0000 UTC m=+35.373792629 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:40 crc kubenswrapper[4791]: E1007 00:11:40.777927 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:48.777907519 +0000 UTC m=+35.373845170 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.804589 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.840871 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-
dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.870444 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.870488 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.870499 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.870517 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.870528 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.884441 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.917831 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.961962 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:40Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.973890 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.973934 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.973943 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.973956 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:40 crc kubenswrapper[4791]: I1007 00:11:40.973967 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:40Z","lastTransitionTime":"2025-10-07T00:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.007283 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z 
is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.045485 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.068253 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.068377 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:41 crc kubenswrapper[4791]: E1007 00:11:41.068446 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.068468 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:41 crc kubenswrapper[4791]: E1007 00:11:41.068639 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:41 crc kubenswrapper[4791]: E1007 00:11:41.068887 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.076180 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.076429 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.076605 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.076768 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.076909 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.080967 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.119230 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.160003 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.179953 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.180008 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.180019 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.180037 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.180053 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.203220 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.245391 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.279369 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.280522 4791 generic.go:334] "Generic (PLEG): container finished" podID="caaf5441-9d24-4a73-9c10-a28c7278c2f3" containerID="e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11" exitCode=0 Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.280557 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerDied","Data":"e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.281728 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.281774 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.281786 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.281813 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.281840 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.285303 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.324944 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.365727 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.385346 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.385437 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.385457 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.385482 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.385499 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.401870 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.436698 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.477932 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.487524 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.487565 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.487574 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.487593 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.487608 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.518782 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.570209 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.590627 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.590678 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.590692 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.590712 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.590726 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.600211 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.642839 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.683213 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.694232 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.694281 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.694292 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.694309 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.694321 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.721870 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.761775 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.796950 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.796999 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.797012 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.797028 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.797039 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.802618 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.840142 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-k
ube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.880906 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.899274 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.899325 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.899341 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.899361 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.899370 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:41Z","lastTransitionTime":"2025-10-07T00:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.926259 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa
41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:41 crc kubenswrapper[4791]: I1007 00:11:41.966446 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:41Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.001748 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.001813 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.001824 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.001842 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.001852 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.003348 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.041270 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.104604 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.104648 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.104661 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.104678 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.104692 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.206966 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.207006 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.207016 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.207035 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.207046 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.292108 4791 generic.go:334] "Generic (PLEG): container finished" podID="caaf5441-9d24-4a73-9c10-a28c7278c2f3" containerID="1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c" exitCode=0 Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.292166 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerDied","Data":"1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.310955 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.310991 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.311002 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.311018 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.311030 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.311988 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.327372 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.349854 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z 
is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.364983 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.379639 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.394560 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.406022 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.413835 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.413944 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.413964 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.414006 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.414026 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.418555 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.440754 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.454328 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.482925 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.516867 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 
crc kubenswrapper[4791]: I1007 00:11:42.516925 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.516939 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.516961 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.516974 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.519937 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.559786 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.614863 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/o
penshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.619991 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.620045 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.620058 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.620080 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.620093 4791 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.642153 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:42Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.723648 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.723686 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.723700 4791 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.723720 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.723733 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.826587 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.826964 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.826978 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.827000 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.827013 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.931063 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.931115 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.931131 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.931152 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:42 crc kubenswrapper[4791]: I1007 00:11:42.931166 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:42Z","lastTransitionTime":"2025-10-07T00:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.034232 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.034280 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.034294 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.034317 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.034331 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.069477 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.069522 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.069563 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:43 crc kubenswrapper[4791]: E1007 00:11:43.069649 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:43 crc kubenswrapper[4791]: E1007 00:11:43.069768 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:43 crc kubenswrapper[4791]: E1007 00:11:43.069937 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.137144 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.137189 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.137203 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.137228 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.137244 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.240842 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.240924 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.240962 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.240985 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.241004 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.301656 4791 generic.go:334] "Generic (PLEG): container finished" podID="caaf5441-9d24-4a73-9c10-a28c7278c2f3" containerID="a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36" exitCode=0 Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.301735 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerDied","Data":"a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.307911 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.308249 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.321351 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.337323 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.344919 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.344995 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.345012 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.345041 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.345061 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.351133 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.360819 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.370806 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.395837 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.420479 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.439535 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3
b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.449598 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.449648 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.449661 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.449680 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.449700 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.455058 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220
d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.471042 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.487685 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.499707 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.515590 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.533089 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.553303 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.553361 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.553376 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.553397 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.553432 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.554807 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c193799
79168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.568221 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.581994 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.592824 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.606202 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.627470 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81a585aaceb493f7f11358e37bc52127d65de5c0
87d5be00e9b519e42edd2628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.643606 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.655509 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.655558 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.655566 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.655581 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.655591 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.657129 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.671264 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.683773 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.696767 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.718235 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"
mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.732644 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.751529 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.758421 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.758470 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc 
kubenswrapper[4791]: I1007 00:11:43.758483 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.758504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.758522 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.768025 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.801496 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.843508 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:43Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.861779 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.861844 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.861854 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.861871 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.861880 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.964671 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.964721 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.964731 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.964749 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:43 crc kubenswrapper[4791]: I1007 00:11:43.964762 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:43Z","lastTransitionTime":"2025-10-07T00:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.067027 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.067293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.067354 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.067471 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.067554 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.089868 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.106249 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.129573 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.153392 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\
\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.169845 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.169912 
4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.169932 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.169964 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.169985 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.174499 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.203306 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"resta
rtCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":
{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.219693 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.242039 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.254928 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.272813 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.272882 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.272899 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.272927 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.272945 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.276924 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.293399 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.316971 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" event={"ID":"caaf5441-9d24-4a73-9c10-a28c7278c2f3","Type":"ContainerStarted","Data":"fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.317101 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.317650 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.323424 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.340084 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.365878 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/
bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverr
ide-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.376035 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.376102 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.376122 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.376153 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.376175 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.404739 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b
\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.441191 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.478868 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.478920 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.478929 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.478947 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.478960 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.481753 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.519922 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.560344 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.581671 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.581714 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.581724 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.581739 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.581750 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.607081 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81a585aaceb493f7f11358e37bc52127d65de5c0
87d5be00e9b519e42edd2628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.639533 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.684940 4791 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.684988 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.684997 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.685016 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.685028 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.686741 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.723775 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.761942 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.788548 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.788586 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.788596 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.788611 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.788622 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.801116 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.844883 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.879463 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.890722 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.890764 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.890775 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.890793 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.890807 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.922253 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.960754 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:44Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.992946 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.993567 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.993589 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.993617 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:44 crc kubenswrapper[4791]: I1007 00:11:44.993638 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:44Z","lastTransitionTime":"2025-10-07T00:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.006343 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b
\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:45Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.041171 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:45Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.068700 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.068758 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.068794 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:45 crc kubenswrapper[4791]: E1007 00:11:45.068889 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:45 crc kubenswrapper[4791]: E1007 00:11:45.069386 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:45 crc kubenswrapper[4791]: E1007 00:11:45.069493 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.096702 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.096754 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.096765 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.096783 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.096805 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.199696 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.199727 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.199736 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.199750 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.199760 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.303048 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.303090 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.303100 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.303119 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.303136 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.324631 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.405416 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.405449 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.405458 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.405473 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.405485 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.509373 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.509459 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.509474 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.509499 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.509514 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.611763 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.611854 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.611875 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.611940 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.611966 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.715863 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.715936 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.715956 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.715988 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.716007 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.818793 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.818835 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.818846 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.818865 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.818882 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.921526 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.921560 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.921567 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.921581 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:45 crc kubenswrapper[4791]: I1007 00:11:45.921589 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:45Z","lastTransitionTime":"2025-10-07T00:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.024504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.024549 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.024562 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.024580 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.024592 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.127938 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.127989 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.128003 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.128023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.128035 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.230199 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.230259 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.230274 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.230293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.230305 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.329775 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/0.log" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.331929 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.331959 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.331969 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.331983 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.331993 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.333184 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628" exitCode=1 Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.333229 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.334279 4791 scope.go:117] "RemoveContainer" containerID="81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.365135 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.378500 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.394376 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.408166 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.433040 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81a585aaceb493f7f11358e37bc52127d65de5c0
87d5be00e9b519e42edd2628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:45Z\\\",\\\"message\\\":\\\"nshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831611 6091 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831749 6091 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831879 6091 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.832184 6091 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.833178 6091 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 00:11:45.833248 6091 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 00:11:45.833360 6091 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.434934 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.435001 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.435013 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.435040 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.435052 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.451697 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.466140 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.481441 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.495789 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.517215 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.537271 4791 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.537310 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.537320 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.537337 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.537347 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.547663 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.568742 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.586788 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.600973 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.623964 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:46Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.640042 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.640099 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.640123 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.640147 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.640162 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.743025 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.743069 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.743081 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.743098 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.743118 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.845725 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.845785 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.845804 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.845830 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.845846 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.948996 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.949041 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.949054 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.949072 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:46 crc kubenswrapper[4791]: I1007 00:11:46.949084 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:46Z","lastTransitionTime":"2025-10-07T00:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.052477 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.052860 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.052965 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.053044 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.053114 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.068393 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.068436 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:47 crc kubenswrapper[4791]: E1007 00:11:47.068626 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:47 crc kubenswrapper[4791]: E1007 00:11:47.068759 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.068865 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:47 crc kubenswrapper[4791]: E1007 00:11:47.069061 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.156511 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.156608 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.156632 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.156653 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.156666 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.259283 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.259336 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.259350 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.259372 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.259386 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.339380 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/0.log" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.342821 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.342903 4791 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.361978 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.362054 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.362074 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.362104 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.362123 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.362528 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.376149 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.389058 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.403472 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.422150 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.437641 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.453839 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.464136 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.464173 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.464189 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.464209 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.464223 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.467274 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.484387 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.501506 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.511112 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.524234 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.535739 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.555889 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce139202
4b1347661e0b324b4654b504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:45Z\\\",\\\"message\\\":\\\"nshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831611 6091 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831749 6091 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831879 6091 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.832184 6091 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.833178 6091 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 00:11:45.833248 6091 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 00:11:45.833360 6091 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.567141 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.567196 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.567206 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.567223 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.567234 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.570662 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:47Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.669809 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.669851 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.669862 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.669876 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.669886 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.772571 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.772630 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.772648 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.772668 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.772682 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.875213 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.875293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.875309 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.875331 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.875345 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.978353 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.978393 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.978422 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.978438 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:47 crc kubenswrapper[4791]: I1007 00:11:47.978449 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:47Z","lastTransitionTime":"2025-10-07T00:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.083954 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.084321 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.084584 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.084615 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.084632 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.188001 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.188102 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.188127 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.188158 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.188178 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.291279 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.291323 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.291336 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.291353 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.291365 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.315679 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7"] Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.316667 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.318683 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.318848 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.328110 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.340532 4791 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.349354 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/1.log" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.350505 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/0.log" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.354124 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504" exitCode=1 Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.354223 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.354344 4791 scope.go:117] "RemoveContainer" containerID="81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.355006 4791 scope.go:117] "RemoveContainer" containerID="4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504" Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.355234 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.369589 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5djmf\" (UniqueName: \"kubernetes.io/projected/efe621da-30c8-444b-9016-a08db40a94eb-kube-api-access-5djmf\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.369710 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/efe621da-30c8-444b-9016-a08db40a94eb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.369754 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/efe621da-30c8-444b-9016-a08db40a94eb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.369840 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/efe621da-30c8-444b-9016-a08db40a94eb-ovnkube-config\") pod 
\"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.377156 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.391614 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.393596 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.393633 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.393642 4791 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.393659 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.393672 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.405434 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.422945 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.436203 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.451863 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.471038 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/efe621da-30c8-444b-9016-a08db40a94eb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.471110 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/efe621da-30c8-444b-9016-a08db40a94eb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.471178 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/efe621da-30c8-444b-9016-a08db40a94eb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.471209 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5djmf\" (UniqueName: \"kubernetes.io/projected/efe621da-30c8-444b-9016-a08db40a94eb-kube-api-access-5djmf\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.472265 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/efe621da-30c8-444b-9016-a08db40a94eb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.472699 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/efe621da-30c8-444b-9016-a08db40a94eb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.480870 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.481029 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/efe621da-30c8-444b-9016-a08db40a94eb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.496254 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.496282 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.496290 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.496305 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.496314 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.501124 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5djmf\" (UniqueName: \"kubernetes.io/projected/efe621da-30c8-444b-9016-a08db40a94eb-kube-api-access-5djmf\") pod \"ovnkube-control-plane-749d76644c-rx5v7\" (UID: \"efe621da-30c8-444b-9016-a08db40a94eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.512262 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.529644 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.555145 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:45Z\\\",\\\"message\\\":\\\"nshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831611 6091 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831749 6091 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831879 6091 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.832184 6091 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.833178 6091 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 00:11:45.833248 6091 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 00:11:45.833360 6091 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.571578 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.589626 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.598721 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.598769 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.598780 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.598796 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.598808 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.607800 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.623372 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.637989 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.639456 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: W1007 00:11:48.653674 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podefe621da_30c8_444b_9016_a08db40a94eb.slice/crio-1d848adb1e13fb8ea6c0a2d343b8a044054b7584a000051e4d3b48dd22eb2b0d WatchSource:0}: Error finding container 1d848adb1e13fb8ea6c0a2d343b8a044054b7584a000051e4d3b48dd22eb2b0d: Status 404 returned error can't find the container with id 1d848adb1e13fb8ea6c0a2d343b8a044054b7584a000051e4d3b48dd22eb2b0d Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.657304 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.675151 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.699018 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce139202
4b1347661e0b324b4654b504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81a585aaceb493f7f11358e37bc52127d65de5c087d5be00e9b519e42edd2628\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:45Z\\\",\\\"message\\\":\\\"nshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831611 6091 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831749 6091 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.831879 6091 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.832184 6091 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 00:11:45.833178 6091 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 00:11:45.833248 6091 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 00:11:45.833360 6091 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:47Z\\\",\\\"message\\\":\\\"lt, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"f9232b32-e89f-4c8e-acc4-c6801b70dcb0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, 
SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.110\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1007 00:11:47.231101 6219 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.702101 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.702141 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.702151 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.702166 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.702182 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.714972 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.739761 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\
"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.756729 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.773209 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.773232 4791 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.773464 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.773512 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:04.773467406 +0000 UTC m=+51.369405097 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.773629 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.773714 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:04.773695592 +0000 UTC m=+51.369633233 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.787062 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.801105 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}
]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.805348 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.805392 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.805419 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.805439 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.805449 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.819343 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.836886 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.857570 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.874367 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.874483 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.874548 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.874611 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.874707 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:04.874687187 +0000 UTC m=+51.470624838 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.874769 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.874811 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.874830 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.874776 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.875859 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.875876 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.874921 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:04.874865902 +0000 UTC m=+51.470803563 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:48 crc kubenswrapper[4791]: E1007 00:11:48.875955 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:04.875933913 +0000 UTC m=+51.471871564 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.885198 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":
\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"ter
minated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.908707 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.908755 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.908768 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.908782 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.908792 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:48Z","lastTransitionTime":"2025-10-07T00:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.908742 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:48 crc kubenswrapper[4791]: I1007 00:11:48.924143 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:48Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.011676 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.011723 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.011732 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.011749 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.011761 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.068970 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.068970 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.069132 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.068989 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.069360 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.069460 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.113826 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.113865 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.113874 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.113889 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.113901 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.216368 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.216449 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.216462 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.216479 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.216492 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.318619 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.318669 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.318687 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.318713 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.318727 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.359000 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/1.log" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.362887 4791 scope.go:117] "RemoveContainer" containerID="4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.363039 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.365026 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" event={"ID":"efe621da-30c8-444b-9016-a08db40a94eb","Type":"ContainerStarted","Data":"4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.365092 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" event={"ID":"efe621da-30c8-444b-9016-a08db40a94eb","Type":"ContainerStarted","Data":"9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.365108 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" event={"ID":"efe621da-30c8-444b-9016-a08db40a94eb","Type":"ContainerStarted","Data":"1d848adb1e13fb8ea6c0a2d343b8a044054b7584a000051e4d3b48dd22eb2b0d"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.385335 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.397799 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.412050 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.422069 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.422123 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.422135 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.422155 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.422169 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.426977 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.438078 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.455554 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce139202
4b1347661e0b324b4654b504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:47Z\\\",\\\"message\\\":\\\"lt, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"f9232b32-e89f-4c8e-acc4-c6801b70dcb0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.110\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1007 00:11:47.231101 6219 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.466192 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-ppklr"] Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.466765 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.466841 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.469358 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.494748 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b150
0fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.509189 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.524709 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.524787 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.524801 4791 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.524826 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.524839 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.526884 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.538296 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.549916 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.562277 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.576385 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.583314 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt9k2\" (UniqueName: \"kubernetes.io/projected/9e16019f-8b86-49e5-a866-bb10c4c91e44-kube-api-access-zt9k2\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.583392 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.590452 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.606329 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.623450 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.623509 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.623528 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.623560 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.623580 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.626856 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce139202
4b1347661e0b324b4654b504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:47Z\\\",\\\"message\\\":\\\"lt, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"f9232b32-e89f-4c8e-acc4-c6801b70dcb0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.110\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1007 00:11:47.231101 6219 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.638168 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.641816 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.641827 4791 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.641884 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.641908 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.641937 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.641957 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.657271 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.659853 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.661299 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.661341 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.661353 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.661370 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.661380 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.673829 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.674157 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"3
0c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.678896 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.678937 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.678948 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.678974 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.678983 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.684906 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.684993 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt9k2\" (UniqueName: \"kubernetes.io/projected/9e16019f-8b86-49e5-a866-bb10c4c91e44-kube-api-access-zt9k2\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.685140 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.685210 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs podName:9e16019f-8b86-49e5-a866-bb10c4c91e44 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:50.185191862 +0000 UTC m=+36.781129513 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs") pod "network-metrics-daemon-ppklr" (UID: "9e16019f-8b86-49e5-a866-bb10c4c91e44") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.689482 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.691873 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.695422 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.695549 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.695619 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.695693 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.695758 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.702225 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.703087 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt9k2\" (UniqueName: \"kubernetes.io/projected/9e16019f-8b86-49e5-a866-bb10c4c91e44-kube-api-access-zt9k2\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.708169 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: E1007 00:11:49.708354 4791 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.711649 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.711688 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.711700 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.711717 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.711730 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.715768 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.726951 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.744998 4791 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":
\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"cont
ainerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.758085 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.778345 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.793720 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.807669 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.814523 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.814572 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.814584 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.814605 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.814617 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.823536 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.842143 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.856969 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.867603 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:49Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.917426 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.917491 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.917510 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.917537 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:49 crc kubenswrapper[4791]: I1007 00:11:49.917556 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:49Z","lastTransitionTime":"2025-10-07T00:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.020229 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.020550 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.020738 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.020828 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.020910 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.123455 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.123494 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.123502 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.123518 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.123528 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.189592 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:50 crc kubenswrapper[4791]: E1007 00:11:50.189823 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:50 crc kubenswrapper[4791]: E1007 00:11:50.189881 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs podName:9e16019f-8b86-49e5-a866-bb10c4c91e44 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:51.189866539 +0000 UTC m=+37.785804190 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs") pod "network-metrics-daemon-ppklr" (UID: "9e16019f-8b86-49e5-a866-bb10c4c91e44") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.226689 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.226728 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.226737 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.226754 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.226767 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.328645 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.328689 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.328711 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.328727 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.328739 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.431891 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.431926 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.431935 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.431950 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.431959 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.534538 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.534932 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.534950 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.534967 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.534980 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.637992 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.638041 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.638051 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.638066 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.638077 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.741196 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.741280 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.741303 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.741321 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.741332 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.844700 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.844744 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.844755 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.844771 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.844783 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.948639 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.948697 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.948710 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.948736 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:50 crc kubenswrapper[4791]: I1007 00:11:50.948751 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:50Z","lastTransitionTime":"2025-10-07T00:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.052019 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.052074 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.052086 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.052105 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.052118 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.068674 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.068680 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:51 crc kubenswrapper[4791]: E1007 00:11:51.068927 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.068680 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:51 crc kubenswrapper[4791]: E1007 00:11:51.068818 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.068708 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:51 crc kubenswrapper[4791]: E1007 00:11:51.069102 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:11:51 crc kubenswrapper[4791]: E1007 00:11:51.069122 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.154908 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.154957 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.154970 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.154990 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.155001 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.200065 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:51 crc kubenswrapper[4791]: E1007 00:11:51.200259 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:51 crc kubenswrapper[4791]: E1007 00:11:51.200365 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs podName:9e16019f-8b86-49e5-a866-bb10c4c91e44 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:53.200342946 +0000 UTC m=+39.796280597 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs") pod "network-metrics-daemon-ppklr" (UID: "9e16019f-8b86-49e5-a866-bb10c4c91e44") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.258260 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.258335 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.258351 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.258370 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.258384 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.361017 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.361097 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.361106 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.361119 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.361131 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.463944 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.463988 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.463998 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.464013 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.464028 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.566731 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.566775 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.566787 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.566803 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.566813 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.669525 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.669575 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.669587 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.669606 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.669620 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.772218 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.772270 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.772281 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.772300 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.772319 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.875413 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.875463 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.875474 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.875493 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.875506 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.978071 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.978114 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.978125 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.978143 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:51 crc kubenswrapper[4791]: I1007 00:11:51.978159 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:51Z","lastTransitionTime":"2025-10-07T00:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.080150 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.080185 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.080194 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.080209 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.080217 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.182970 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.183025 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.183036 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.183052 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.183063 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.285944 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.285978 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.285990 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.286007 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.286018 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.388071 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.388120 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.388132 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.388154 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.388165 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.491049 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.491103 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.491118 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.491139 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.491152 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.593599 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.593669 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.593681 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.593718 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.593728 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.695995 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.696043 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.696054 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.696074 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.696087 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.798152 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.798201 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.798211 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.798226 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.798236 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.901273 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.901328 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.901340 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.901360 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:52 crc kubenswrapper[4791]: I1007 00:11:52.901372 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:52Z","lastTransitionTime":"2025-10-07T00:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.004528 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.004812 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.004919 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.004993 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.005120 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.068561 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.068607 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.068587 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.068568 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:53 crc kubenswrapper[4791]: E1007 00:11:53.068710 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:53 crc kubenswrapper[4791]: E1007 00:11:53.068775 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:11:53 crc kubenswrapper[4791]: E1007 00:11:53.068840 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:53 crc kubenswrapper[4791]: E1007 00:11:53.068903 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.107992 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.108041 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.108052 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.108069 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.108078 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.210634 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.210682 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.210738 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.210765 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.210783 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.223758 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:53 crc kubenswrapper[4791]: E1007 00:11:53.223903 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:53 crc kubenswrapper[4791]: E1007 00:11:53.223977 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs podName:9e16019f-8b86-49e5-a866-bb10c4c91e44 nodeName:}" failed. No retries permitted until 2025-10-07 00:11:57.223957476 +0000 UTC m=+43.819895127 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs") pod "network-metrics-daemon-ppklr" (UID: "9e16019f-8b86-49e5-a866-bb10c4c91e44") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.313398 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.313522 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.313555 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.313588 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.313609 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.416193 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.416246 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.416260 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.416279 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.416290 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.518924 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.518964 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.518976 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.518993 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.519005 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.622143 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.622476 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.622563 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.622628 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.622690 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.724578 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.724628 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.724637 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.724650 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.724660 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.826952 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.826986 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.826997 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.827013 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.827024 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.929958 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.929995 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.930007 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.930022 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:53 crc kubenswrapper[4791]: I1007 00:11:53.930033 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:53Z","lastTransitionTime":"2025-10-07T00:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.032252 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.032292 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.032301 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.032318 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.032329 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.087538 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a673
14731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.100513 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.111108 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.121010 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.133687 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.133720 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.133730 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.133745 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.133755 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.134275 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.149105 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.162330 4791 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.174660 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.189602 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.199933 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.210247 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.224851 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.236323 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.236385 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.236396 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.236426 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.236440 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.239646 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.255531 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.267325 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.286301 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:47Z\\\",\\\"message\\\":\\\"lt, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"f9232b32-e89f-4c8e-acc4-c6801b70dcb0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.110\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1007 00:11:47.231101 6219 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servicea
ccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.297496 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:54Z is after 2025-08-24T17:21:41Z" Oct 07 
00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.339090 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.339132 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.339145 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.339161 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.339174 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.441597 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.441679 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.441691 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.441716 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.441730 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.544721 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.544762 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.544771 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.544788 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.544799 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.647851 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.647908 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.647919 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.647939 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.647949 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.750666 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.750716 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.750726 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.750742 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.750753 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.853341 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.853394 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.853422 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.853441 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.853454 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.956394 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.956520 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.956542 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.956572 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:54 crc kubenswrapper[4791]: I1007 00:11:54.956591 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:54Z","lastTransitionTime":"2025-10-07T00:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.060159 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.060245 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.060293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.060328 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.060347 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.068643 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.068725 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.068723 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.068693 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:55 crc kubenswrapper[4791]: E1007 00:11:55.068890 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:55 crc kubenswrapper[4791]: E1007 00:11:55.069051 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:55 crc kubenswrapper[4791]: E1007 00:11:55.069172 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:11:55 crc kubenswrapper[4791]: E1007 00:11:55.069275 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.163502 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.163680 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.163703 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.163731 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.163751 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.266462 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.266515 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.266525 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.266542 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.266553 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.370334 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.370388 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.370398 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.370437 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.370450 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.473544 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.473592 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.473603 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.473623 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.473637 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.576576 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.576640 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.576653 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.576674 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.576687 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.678844 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.678888 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.678900 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.678918 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.678931 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.781830 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.781875 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.781883 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.781899 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.781910 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.884282 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.884335 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.884344 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.884360 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.884370 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.987008 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.987059 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.987069 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.987086 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:55 crc kubenswrapper[4791]: I1007 00:11:55.987097 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:55Z","lastTransitionTime":"2025-10-07T00:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.089942 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.090002 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.090017 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.090036 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.090049 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.193098 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.193201 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.193216 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.193236 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.193248 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.295796 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.295866 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.295875 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.295891 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.295901 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.398505 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.398554 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.398566 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.398580 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.398590 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.501537 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.501578 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.501589 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.501604 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.501614 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.513983 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.514915 4791 scope.go:117] "RemoveContainer" containerID="4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504" Oct 07 00:11:56 crc kubenswrapper[4791]: E1007 00:11:56.515093 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.603557 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.603601 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.603612 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.603628 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.603640 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.706044 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.706089 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.706100 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.706117 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.706132 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.807900 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.807964 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.807977 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.807994 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.808005 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.910419 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.910460 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.910475 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.910492 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:56 crc kubenswrapper[4791]: I1007 00:11:56.910503 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:56Z","lastTransitionTime":"2025-10-07T00:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.013166 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.013225 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.013236 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.013252 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.013264 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.068751 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.068836 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:57 crc kubenswrapper[4791]: E1007 00:11:57.068893 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:57 crc kubenswrapper[4791]: E1007 00:11:57.068986 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.069077 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.069112 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:57 crc kubenswrapper[4791]: E1007 00:11:57.069178 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:57 crc kubenswrapper[4791]: E1007 00:11:57.069237 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.115517 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.115553 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.115561 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.115600 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.115611 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.218043 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.218087 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.218097 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.218113 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.218123 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.267737 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:57 crc kubenswrapper[4791]: E1007 00:11:57.267924 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:57 crc kubenswrapper[4791]: E1007 00:11:57.267988 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs podName:9e16019f-8b86-49e5-a866-bb10c4c91e44 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:05.267973325 +0000 UTC m=+51.863910976 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs") pod "network-metrics-daemon-ppklr" (UID: "9e16019f-8b86-49e5-a866-bb10c4c91e44") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.320582 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.320628 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.320639 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.320657 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.320668 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.424023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.424072 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.424082 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.424102 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.424114 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.528237 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.528299 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.528311 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.528334 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.528346 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.631985 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.632067 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.632150 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.632194 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.632224 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.736180 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.736228 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.736238 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.736255 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.736265 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.839942 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.840044 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.840080 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.840116 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.840141 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.943621 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.943739 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.943753 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.943771 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:57 crc kubenswrapper[4791]: I1007 00:11:57.943783 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:57Z","lastTransitionTime":"2025-10-07T00:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.046350 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.046433 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.046443 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.046459 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.046469 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.149793 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.149864 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.149879 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.149902 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.149916 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.252480 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.252550 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.252563 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.252581 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.252593 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.355634 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.355683 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.355691 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.355708 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.355719 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.458318 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.458363 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.458372 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.458419 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.458436 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.560951 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.561020 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.561033 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.561053 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.561066 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.664357 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.664423 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.664439 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.664461 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.664473 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.767418 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.767473 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.767487 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.767513 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.767527 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.870113 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.870146 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.870154 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.870168 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.870177 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.972972 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.973030 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.973043 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.973060 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:58 crc kubenswrapper[4791]: I1007 00:11:58.973071 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:58Z","lastTransitionTime":"2025-10-07T00:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.068558 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.068593 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.068706 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.068717 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.068780 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.068883 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.068929 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.068989 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.075660 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.075693 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.075701 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.075715 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.075727 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.177588 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.177630 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.177638 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.177653 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.177662 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.280427 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.280472 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.280504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.280524 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.280539 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.383299 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.383345 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.383360 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.383379 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.383420 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.485235 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.485273 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.485281 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.485298 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.485308 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.588349 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.588391 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.588426 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.588442 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.588452 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.691113 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.691174 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.691188 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.691212 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.691228 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.793370 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.793440 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.793449 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.793465 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.793476 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.794648 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.794745 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.794759 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.794773 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.794783 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.807647 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:59Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.811486 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.811539 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.811553 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.811573 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.811585 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.825560 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:59Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.830735 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.830790 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.830803 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.830819 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.830830 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.849531 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:59Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.853487 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.853527 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.853538 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.853554 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.853566 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.867149 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:59Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.871037 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.871087 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.871100 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.871119 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.871132 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.888731 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:11:59Z is after 2025-08-24T17:21:41Z" Oct 07 00:11:59 crc kubenswrapper[4791]: E1007 00:11:59.888917 4791 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.896866 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.896914 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.896928 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.896947 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.896957 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.999737 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.999789 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.999802 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:11:59 crc kubenswrapper[4791]: I1007 00:11:59.999821 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:11:59.999834 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:11:59Z","lastTransitionTime":"2025-10-07T00:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.102122 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.102193 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.102205 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.102222 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.102233 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.205078 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.205126 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.205137 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.205155 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.205174 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.307284 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.307314 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.307323 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.307336 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.307344 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.408973 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.409029 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.409039 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.409053 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.409062 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.511514 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.511543 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.511552 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.511566 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.511576 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.614043 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.614087 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.614102 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.614123 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.614134 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.716792 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.716835 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.716846 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.716864 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.716873 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.819277 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.819345 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.819358 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.819377 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.819391 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.921582 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.921622 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.921630 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.921643 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:00 crc kubenswrapper[4791]: I1007 00:12:00.921652 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:00Z","lastTransitionTime":"2025-10-07T00:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.024445 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.024483 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.024492 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.024507 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.024517 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.068920 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.068992 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.068944 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.068920 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:01 crc kubenswrapper[4791]: E1007 00:12:01.069129 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:01 crc kubenswrapper[4791]: E1007 00:12:01.069291 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:01 crc kubenswrapper[4791]: E1007 00:12:01.069392 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:01 crc kubenswrapper[4791]: E1007 00:12:01.069521 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.127026 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.127077 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.127091 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.127110 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.127122 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.229955 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.229999 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.230007 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.230023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.230033 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.332316 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.332373 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.332383 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.332419 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.332432 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.435532 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.435578 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.435588 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.435602 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.435612 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.538381 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.538468 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.538478 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.538495 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.538506 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.641091 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.641136 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.641145 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.641161 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.641171 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.743889 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.743946 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.743955 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.743981 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.744053 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.846454 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.846493 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.846501 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.846516 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.846526 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.949463 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.949503 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.949511 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.949525 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:01 crc kubenswrapper[4791]: I1007 00:12:01.949535 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:01Z","lastTransitionTime":"2025-10-07T00:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.051543 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.051877 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.051962 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.052037 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.052123 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.155594 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.155656 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.155669 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.155692 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.155710 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.258313 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.258358 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.258371 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.258391 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.258436 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.361047 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.361096 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.361139 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.361155 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.361166 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.463637 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.463673 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.463685 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.463700 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.463711 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.566547 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.566592 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.566608 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.566625 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.566636 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.669260 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.669292 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.669300 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.669513 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.669523 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.773077 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.773128 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.773138 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.773156 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.773167 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.876182 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.876219 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.876237 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.876259 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.876269 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.979104 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.979157 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.979166 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.979183 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:02 crc kubenswrapper[4791]: I1007 00:12:02.979193 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:02Z","lastTransitionTime":"2025-10-07T00:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.069006 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.069075 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.069008 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:03 crc kubenswrapper[4791]: E1007 00:12:03.069142 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:03 crc kubenswrapper[4791]: E1007 00:12:03.069224 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.069026 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:03 crc kubenswrapper[4791]: E1007 00:12:03.069336 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:03 crc kubenswrapper[4791]: E1007 00:12:03.069461 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.081356 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.081395 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.081424 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.081442 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.081451 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.184628 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.184707 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.184770 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.184791 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.184807 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.287178 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.287219 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.287227 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.287241 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.287250 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.390204 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.390244 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.390253 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.390269 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.390279 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.493282 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.493319 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.493329 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.493344 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.493354 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.595919 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.595967 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.595977 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.595992 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.596003 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.698943 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.698995 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.699007 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.699024 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.699034 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.801751 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.801804 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.801817 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.801834 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.801845 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.903867 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.903909 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.903918 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.903933 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:03 crc kubenswrapper[4791]: I1007 00:12:03.903943 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:03Z","lastTransitionTime":"2025-10-07T00:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.006829 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.006899 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.006913 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.006937 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.006946 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.082770 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.093230 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.105025 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.109188 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.109214 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.109222 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.109235 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.109244 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.127243 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:47Z\\\",\\\"message\\\":\\\"lt, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"f9232b32-e89f-4c8e-acc4-c6801b70dcb0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.110\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1007 00:11:47.231101 6219 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.144797 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 
00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.157311 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.170064 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.182891 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.194072 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.205545 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.211738 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.211785 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.211795 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.211811 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.211823 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.215817 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.236314 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4
a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.253009 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.268295 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b16
2f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountP
ath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece0
20c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.280356 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.292580 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.306374 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:04Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.314206 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.314247 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.314259 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.314276 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.314288 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.416026 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.416073 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.416082 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.416101 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.416110 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.517968 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.518011 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.518022 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.518038 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.518051 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.620458 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.620504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.620516 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.620532 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.620543 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.723445 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.723522 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.723542 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.723577 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.723597 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.826504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.826580 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.826600 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.826649 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.826668 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.850952 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.851089 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:36.851057209 +0000 UTC m=+83.446994890 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.851206 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.851452 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.851523 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-07 00:12:36.851510762 +0000 UTC m=+83.447448423 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.929577 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.929660 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.929680 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.929710 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.929734 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:04Z","lastTransitionTime":"2025-10-07T00:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.952145 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.952214 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:04 crc kubenswrapper[4791]: I1007 00:12:04.952251 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952393 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952418 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952442 4791 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952447 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952454 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952459 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952518 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:36.952500376 +0000 UTC m=+83.548438027 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952584 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:36.952545927 +0000 UTC m=+83.548483618 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952468 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:12:04 crc kubenswrapper[4791]: E1007 00:12:04.952672 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:36.952656701 +0000 UTC m=+83.548594592 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.032692 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.032743 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.032757 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.032779 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.032796 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.068621 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:05 crc kubenswrapper[4791]: E1007 00:12:05.069107 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.069185 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.069216 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.069203 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:05 crc kubenswrapper[4791]: E1007 00:12:05.069320 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:05 crc kubenswrapper[4791]: E1007 00:12:05.069419 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:05 crc kubenswrapper[4791]: E1007 00:12:05.069528 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.136053 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.136103 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.136116 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.136137 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.136149 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.239668 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.239713 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.239724 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.239800 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.239814 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.342570 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.342612 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.342624 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.342640 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.342651 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.358296 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:05 crc kubenswrapper[4791]: E1007 00:12:05.358510 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:12:05 crc kubenswrapper[4791]: E1007 00:12:05.358595 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs podName:9e16019f-8b86-49e5-a866-bb10c4c91e44 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:21.358573587 +0000 UTC m=+67.954511248 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs") pod "network-metrics-daemon-ppklr" (UID: "9e16019f-8b86-49e5-a866-bb10c4c91e44") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.446148 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.446208 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.446221 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.446239 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.446253 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.551813 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.551849 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.551876 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.551891 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.551901 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.654350 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.654425 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.654436 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.654455 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.654467 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.758009 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.758056 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.758079 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.758101 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.758115 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.864717 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.864763 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.864774 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.864792 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.864805 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.967070 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.967117 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.967127 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.967144 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:05 crc kubenswrapper[4791]: I1007 00:12:05.967155 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:05Z","lastTransitionTime":"2025-10-07T00:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.070895 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.070939 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.070955 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.070989 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.071006 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.173547 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.173621 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.173636 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.173709 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.173731 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.275691 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.275743 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.275756 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.275773 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.275784 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.378868 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.378920 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.378930 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.378950 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.378962 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.481619 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.481699 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.481721 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.481751 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.481771 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.584756 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.584802 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.584814 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.584832 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.584844 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.686955 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.686992 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.687001 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.687017 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.687028 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.789319 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.789362 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.789373 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.789389 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.789417 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.893524 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.893596 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.893620 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.893651 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.893682 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.996086 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.996123 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.996131 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.996145 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:06 crc kubenswrapper[4791]: I1007 00:12:06.996156 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:06Z","lastTransitionTime":"2025-10-07T00:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.068233 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.068273 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.068241 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.068233 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:07 crc kubenswrapper[4791]: E1007 00:12:07.068380 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:07 crc kubenswrapper[4791]: E1007 00:12:07.068530 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:07 crc kubenswrapper[4791]: E1007 00:12:07.068645 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:07 crc kubenswrapper[4791]: E1007 00:12:07.068772 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.098413 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.098455 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.098464 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.098497 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.098511 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.200539 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.200627 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.200643 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.200660 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.200671 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.304103 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.304175 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.304213 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.304266 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.304290 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.407047 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.407087 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.407096 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.407111 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.407122 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.509270 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.509330 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.509384 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.509439 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.509467 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.611609 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.611659 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.611669 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.611686 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.611698 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.662478 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.673972 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.677122 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.690222 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.706230 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961
f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.714270 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.714326 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.714342 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.714367 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.714386 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.721585 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.732636 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.746553 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.759940 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.774613 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.789955 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.808089 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce139202
4b1347661e0b324b4654b504\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:47Z\\\",\\\"message\\\":\\\"lt, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"f9232b32-e89f-4c8e-acc4-c6801b70dcb0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.110\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1007 00:11:47.231101 6219 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.816388 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.816452 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.816465 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.816486 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.816501 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.819994 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.838515 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.851058 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.862343 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.872834 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.885791 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.896736 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:07Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.918793 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.918834 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 
00:12:07.918843 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.918861 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:07 crc kubenswrapper[4791]: I1007 00:12:07.918874 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:07Z","lastTransitionTime":"2025-10-07T00:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.021043 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.021084 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.021095 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.021147 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.021161 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.123424 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.123459 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.123468 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.123482 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.123492 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.226326 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.226472 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.226483 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.226499 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.226509 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.329091 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.329143 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.329154 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.329172 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.329183 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.430810 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.430859 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.430868 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.430885 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.430896 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.533239 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.533293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.533305 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.533325 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.533340 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.635900 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.635946 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.635956 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.635972 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.635982 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.738417 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.738463 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.738472 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.738487 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.738499 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.841608 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.841687 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.841702 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.841731 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.841750 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.945067 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.945129 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.945143 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.945162 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:08 crc kubenswrapper[4791]: I1007 00:12:08.945175 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:08Z","lastTransitionTime":"2025-10-07T00:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.047987 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.048041 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.048057 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.048079 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.048094 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.068314 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.068381 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.068456 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.068520 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:09 crc kubenswrapper[4791]: E1007 00:12:09.068569 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:09 crc kubenswrapper[4791]: E1007 00:12:09.068700 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:09 crc kubenswrapper[4791]: E1007 00:12:09.068823 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:09 crc kubenswrapper[4791]: E1007 00:12:09.068927 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.150838 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.150910 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.150924 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.150947 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.150959 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.253428 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.253474 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.253486 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.253502 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.253515 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.356153 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.356208 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.356223 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.356247 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.356258 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.458845 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.458896 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.458907 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.458924 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.458936 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.561781 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.561823 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.561834 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.561851 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.561862 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.664345 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.664380 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.664389 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.664417 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.664427 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.767230 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.767276 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.767284 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.767299 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.767310 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.870632 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.870695 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.870707 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.870728 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.870746 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.912479 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.912512 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.912522 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.912537 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.912547 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: E1007 00:12:09.927228 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:09Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.931689 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.931743 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.931753 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.931773 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.931787 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: E1007 00:12:09.947312 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:09Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.951489 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.951538 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.951550 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.951566 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.951578 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: E1007 00:12:09.966320 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:09Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.970369 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.970428 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.970441 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.970458 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.970471 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:09 crc kubenswrapper[4791]: E1007 00:12:09.985246 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:09Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.989078 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.989137 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.989148 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.989163 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:09 crc kubenswrapper[4791]: I1007 00:12:09.989172 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:09Z","lastTransitionTime":"2025-10-07T00:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: E1007 00:12:10.005859 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: E1007 00:12:10.006036 4791 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.007651 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.007702 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.007714 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.007732 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.007747 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.069129 4791 scope.go:117] "RemoveContainer" containerID="4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.110804 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.111142 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.111250 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.111528 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.111756 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.214859 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.215323 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.215339 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.215359 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.215373 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.317889 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.317938 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.317950 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.317969 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.317981 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.421106 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.421147 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.421156 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.421172 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.421182 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.431353 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/1.log" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.434313 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.434815 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.453799 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.464847 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.477589 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.493986 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.505313 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.516167 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.523639 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.523689 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 
00:12:10.523698 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.523713 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.523727 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.527534 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88
d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.540007 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.552523 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.567336 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.581276 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.591287 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.605090 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d81d0b5-a614-4621-8bef-837b8a5c631c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.623112 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.625740 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.625780 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.625791 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.625806 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.625817 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.635858 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.647291 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.666913 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:47Z\\\",\\\"message\\\":\\\"lt, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"f9232b32-e89f-4c8e-acc4-c6801b70dcb0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.110\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1007 00:11:47.231101 6219 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:12:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.679785 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z" Oct 07 
00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.727836 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.727889 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.727900 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.727922 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.727935 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.830874 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.830931 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.830945 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.830965 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.830979 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.934068 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.934119 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.934130 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.934149 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:10 crc kubenswrapper[4791]: I1007 00:12:10.934161 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:10Z","lastTransitionTime":"2025-10-07T00:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.036340 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.036384 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.036393 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.036424 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.036435 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.068749 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:11 crc kubenswrapper[4791]: E1007 00:12:11.068898 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.068954 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.069063 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:11 crc kubenswrapper[4791]: E1007 00:12:11.069075 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.069108 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:11 crc kubenswrapper[4791]: E1007 00:12:11.069292 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:11 crc kubenswrapper[4791]: E1007 00:12:11.069447 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.138981 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.139019 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.139030 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.139045 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.139057 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.241688 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.241784 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.241803 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.241826 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.241844 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.344242 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.344302 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.344316 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.344334 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.344348 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.440439 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/2.log" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.441169 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/1.log" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.444305 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" exitCode=1 Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.444365 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.444441 4791 scope.go:117] "RemoveContainer" containerID="4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.445140 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:12:11 crc kubenswrapper[4791]: E1007 00:12:11.445330 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.445821 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.445856 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.445881 4791 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.445898 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.445912 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.473756 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/
lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"conta
inerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.490781 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.505662 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.520106 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.533736 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.547365 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.547979 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.548024 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 
00:12:11.548035 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.548054 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.548069 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.563563 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88
d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.579512 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.593763 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.608629 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.625508 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.637469 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.650816 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.650878 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.650892 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.650914 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.650929 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.652909 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d81d0b5-a614-4621-8bef-837b8a5c631c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7d
bf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.666812 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.679303 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.689397 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.712317 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c04ce54ffd3197481e3df2fe948f861ce1392024b1347661e0b324b4654b504\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:11:47Z\\\",\\\"message\\\":\\\"lt, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"f9232b32-e89f-4c8e-acc4-c6801b70dcb0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/package-server-manager-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.110\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1007 00:11:47.231101 6219 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:10Z\\\",\\\"message\\\":\\\"curred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z]\\\\nI1007 00:12:10.862042 6512 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-ppklr in node crc\\\\nI1007 00:12:10.862044 6512 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1007 00:12:10.862058 6512 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1007 00:12:10.861854 6512 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"e4e4203e-87c7-4024-930a-5d6bdfe2bdde\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:12:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.724117 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:11Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.753636 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.753699 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.753718 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.753736 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.753747 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.855897 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.855947 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.855958 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.855976 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.855986 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.957767 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.957807 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.957817 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.957833 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:11 crc kubenswrapper[4791]: I1007 00:12:11.957844 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:11Z","lastTransitionTime":"2025-10-07T00:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.060387 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.060433 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.060441 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.060458 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.060467 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.162811 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.162852 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.162862 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.162878 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.162887 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.265239 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.265275 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.265284 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.265307 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.265316 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.368246 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.368287 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.368297 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.368313 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.368323 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.449022 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/2.log" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.452315 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:12:12 crc kubenswrapper[4791]: E1007 00:12:12.452513 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.464685 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d81d0b5-a614-4621-8bef-837b8a5c631c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.470577 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.470632 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.470645 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.470661 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.470673 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.478030 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.491636 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.502631 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.519352 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:10Z\\\",\\\"message\\\":\\\"curred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z]\\\\nI1007 00:12:10.862042 6512 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-ppklr in node crc\\\\nI1007 00:12:10.862044 6512 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1007 00:12:10.862058 6512 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1007 00:12:10.861854 6512 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"e4e4203e-87c7-4024-930a-5d6bdfe2bdde\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:12:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.530118 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 
00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.547723 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.560025 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.570929 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.572697 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.572742 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.572756 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.572777 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.572792 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.580923 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.590866 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.601773 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.611745 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.621472 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.632882 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.645394 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961
f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.658465 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f
8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.668197 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:12Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.674863 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.674901 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.674910 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.674923 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.674933 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.777692 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.777729 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.777738 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.777753 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.777763 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.880753 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.880785 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.880794 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.880807 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.880818 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.983511 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.983844 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.983908 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.984030 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:12 crc kubenswrapper[4791]: I1007 00:12:12.984161 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:12Z","lastTransitionTime":"2025-10-07T00:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.068712 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.068758 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:13 crc kubenswrapper[4791]: E1007 00:12:13.068854 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.068909 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:13 crc kubenswrapper[4791]: E1007 00:12:13.069039 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.069378 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:13 crc kubenswrapper[4791]: E1007 00:12:13.069494 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:13 crc kubenswrapper[4791]: E1007 00:12:13.069643 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.086774 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.086822 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.086837 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.086855 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.086867 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.189370 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.189455 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.189470 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.189484 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.189493 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.291579 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.291637 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.291647 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.291661 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.291671 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.394232 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.394288 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.394300 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.394323 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.394337 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.497201 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.497232 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.497240 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.497253 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.497261 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.601805 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.601866 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.601883 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.601902 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.601921 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.704218 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.704260 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.704269 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.704285 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.704294 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.806812 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.806852 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.806863 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.806881 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.806892 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.908840 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.908881 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.908894 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.908910 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:13 crc kubenswrapper[4791]: I1007 00:12:13.908921 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:13Z","lastTransitionTime":"2025-10-07T00:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.011905 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.011953 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.011964 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.011983 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.011996 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.080059 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.098331 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4
a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.113761 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.113938 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.113960 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.114023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.114043 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.114054 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.131219 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.146820 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.160043 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.174499 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa
7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.187603 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.203639 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.215740 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961
f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.217267 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.217311 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.217320 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.217340 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.217352 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.229273 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.239134 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.252003 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d81d0b5-a614-4621-8bef-837b8a5c631c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.264323 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 
2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.278479 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.290058 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.308747 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf6
29e7ca18edf6851e1ffccda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:10Z\\\",\\\"message\\\":\\\"curred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z]\\\\nI1007 00:12:10.862042 6512 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-ppklr in node crc\\\\nI1007 00:12:10.862044 6512 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1007 00:12:10.862058 6512 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1007 00:12:10.861854 6512 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"e4e4203e-87c7-4024-930a-5d6bdfe2bdde\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:12:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.319117 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.319165 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.319175 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.319194 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.319205 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.321710 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:14Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.421215 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.421259 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.421269 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.421287 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.421301 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.523422 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.523480 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.523491 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.523505 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.523514 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.626094 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.626126 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.626148 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.626161 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.626170 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.729578 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.729632 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.729647 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.729670 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.729683 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.832339 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.832865 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.832879 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.832893 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.832902 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.935777 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.935806 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.935816 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.935830 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:14 crc kubenswrapper[4791]: I1007 00:12:14.935840 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:14Z","lastTransitionTime":"2025-10-07T00:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.038154 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.038191 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.038202 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.038220 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.038231 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.068612 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.068642 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.068643 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.068624 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:15 crc kubenswrapper[4791]: E1007 00:12:15.068727 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:15 crc kubenswrapper[4791]: E1007 00:12:15.068993 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:15 crc kubenswrapper[4791]: E1007 00:12:15.069046 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:15 crc kubenswrapper[4791]: E1007 00:12:15.069014 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.140935 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.140994 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.141006 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.141043 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.141055 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.243538 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.243575 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.243586 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.243602 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.243612 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.347694 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.347757 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.347774 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.347795 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.347811 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.450457 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.450512 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.450532 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.450561 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.450584 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.553048 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.553098 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.553111 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.553129 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.553140 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.659966 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.660069 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.660090 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.660124 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.660144 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.763056 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.763118 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.763129 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.763147 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.763159 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.865127 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.865175 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.865186 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.865204 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.865217 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.967696 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.967737 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.967749 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.967767 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:15 crc kubenswrapper[4791]: I1007 00:12:15.967778 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:15Z","lastTransitionTime":"2025-10-07T00:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.070333 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.070369 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.070377 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.070392 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.070419 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.172204 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.172260 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.172283 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.172305 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.172319 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.274277 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.274318 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.274325 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.274342 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.274353 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.376556 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.376619 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.376629 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.376644 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.376656 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.479239 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.479281 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.479291 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.479309 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.479320 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.581773 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.581821 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.581833 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.581849 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.581863 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.684848 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.684896 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.684905 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.684922 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.684931 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.787342 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.787393 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.787424 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.787486 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.787498 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.892856 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.892902 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.892910 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.892926 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.892936 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.995904 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.995953 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.995962 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.995985 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:16 crc kubenswrapper[4791]: I1007 00:12:16.996000 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:16Z","lastTransitionTime":"2025-10-07T00:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.068437 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.068554 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:17 crc kubenswrapper[4791]: E1007 00:12:17.068681 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.068758 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.068768 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:17 crc kubenswrapper[4791]: E1007 00:12:17.068862 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:17 crc kubenswrapper[4791]: E1007 00:12:17.069075 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:17 crc kubenswrapper[4791]: E1007 00:12:17.069162 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.098067 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.098122 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.098135 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.098152 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.098191 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.201178 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.201234 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.201247 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.201269 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.201285 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.303445 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.303488 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.303500 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.303516 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.303526 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.406150 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.406196 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.406207 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.406223 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.406263 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.508531 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.508572 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.508585 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.508604 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.508616 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.610739 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.610774 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.610784 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.610800 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.610811 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.712949 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.712995 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.713006 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.713025 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.713037 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.815910 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.815955 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.815978 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.815995 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.816006 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.919295 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.919330 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.919340 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.919361 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:17 crc kubenswrapper[4791]: I1007 00:12:17.919375 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:17Z","lastTransitionTime":"2025-10-07T00:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.022053 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.022092 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.022102 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.022119 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.022130 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.124819 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.124851 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.124859 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.124872 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.124882 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.229664 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.229715 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.229724 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.229763 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.229773 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.332553 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.332616 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.332630 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.332653 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.332669 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.437212 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.437258 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.437268 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.437287 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.437298 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.539494 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.539574 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.539608 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.539627 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.539636 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.641774 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.641813 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.641822 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.641837 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.641848 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.744500 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.744548 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.744558 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.744574 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.744586 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.851841 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.851890 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.852000 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.852041 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.852060 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.954655 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.954723 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.954732 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.954745 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:18 crc kubenswrapper[4791]: I1007 00:12:18.954754 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:18Z","lastTransitionTime":"2025-10-07T00:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.057478 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.057524 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.057535 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.057550 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.057562 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.069001 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.069048 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:19 crc kubenswrapper[4791]: E1007 00:12:19.069111 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.069068 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:19 crc kubenswrapper[4791]: E1007 00:12:19.069260 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:19 crc kubenswrapper[4791]: E1007 00:12:19.070153 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.070259 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:19 crc kubenswrapper[4791]: E1007 00:12:19.070391 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.159508 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.159550 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.159558 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.159571 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.159581 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.262675 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.262724 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.262737 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.262753 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.262766 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.365713 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.365752 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.365765 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.365780 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.365791 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.468134 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.468163 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.468170 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.468183 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.468195 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.570909 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.570963 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.570975 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.570990 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.571001 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.673159 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.673189 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.673197 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.673208 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.673216 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.775869 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.775925 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.775933 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.775947 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.775958 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.878657 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.878688 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.878697 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.878709 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.878717 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.981077 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.981140 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.981150 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.981165 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:19 crc kubenswrapper[4791]: I1007 00:12:19.981175 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:19Z","lastTransitionTime":"2025-10-07T00:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.083479 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.083529 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.083538 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.083551 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.083560 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.185648 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.185688 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.185700 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.185716 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.185727 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.288708 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.288756 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.288769 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.288792 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.288802 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.342504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.342587 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.342601 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.342624 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.342640 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: E1007 00:12:20.358370 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:20Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.363021 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.363082 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.363100 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.363128 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.363149 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: E1007 00:12:20.377252 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:20Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.387293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.387355 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.387368 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.387391 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.387427 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: E1007 00:12:20.401524 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:20Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.404473 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.404498 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.404506 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.404521 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.404532 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: E1007 00:12:20.416344 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:20Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.419869 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.419905 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.419916 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.419935 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.419946 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: E1007 00:12:20.433066 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:20Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:20 crc kubenswrapper[4791]: E1007 00:12:20.433194 4791 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.434840 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.434871 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.434880 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.434896 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.434910 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.536951 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.537024 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.537036 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.537053 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.537065 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.640062 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.640118 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.640128 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.640141 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.640150 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.742368 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.742436 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.742447 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.742463 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.742476 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.844704 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.844758 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.844767 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.844781 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.844807 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.946652 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.946718 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.946731 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.946746 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:20 crc kubenswrapper[4791]: I1007 00:12:20.946757 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:20Z","lastTransitionTime":"2025-10-07T00:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.049947 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.049984 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.049993 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.050012 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.050025 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.068283 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.068358 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.068283 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.068283 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:21 crc kubenswrapper[4791]: E1007 00:12:21.068468 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:21 crc kubenswrapper[4791]: E1007 00:12:21.068512 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:21 crc kubenswrapper[4791]: E1007 00:12:21.068701 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:21 crc kubenswrapper[4791]: E1007 00:12:21.068845 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.152183 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.152220 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.152232 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.152271 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.152283 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.255070 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.255131 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.255144 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.255165 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.255178 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.357135 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.357167 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.357176 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.357190 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.357200 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.429672 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:21 crc kubenswrapper[4791]: E1007 00:12:21.429876 4791 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:12:21 crc kubenswrapper[4791]: E1007 00:12:21.429972 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs podName:9e16019f-8b86-49e5-a866-bb10c4c91e44 nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.429954138 +0000 UTC m=+100.025891789 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs") pod "network-metrics-daemon-ppklr" (UID: "9e16019f-8b86-49e5-a866-bb10c4c91e44") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.460195 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.460262 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.460271 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.460287 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.460296 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.563550 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.563592 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.563600 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.563615 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.563624 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.665791 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.665888 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.665902 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.665919 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.665929 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.768649 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.768731 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.768745 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.768765 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.768778 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.870559 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.870593 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.870601 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.870614 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.870623 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.973223 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.973274 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.973283 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.973302 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:21 crc kubenswrapper[4791]: I1007 00:12:21.973320 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:21Z","lastTransitionTime":"2025-10-07T00:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.075179 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.075221 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.075232 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.075249 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.075261 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.177909 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.177968 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.177979 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.178000 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.178010 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.280036 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.280081 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.280090 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.280105 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.280114 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.382495 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.382536 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.382545 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.382558 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.382567 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.484979 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.485018 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.485029 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.485045 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.485058 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.587655 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.587712 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.587721 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.587736 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.587747 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.690457 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.690504 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.690513 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.690525 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.690536 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.793506 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.793543 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.793552 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.793565 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.793574 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.896522 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.896564 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.896574 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.896590 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.896601 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.999313 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.999348 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.999358 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.999370 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:22 crc kubenswrapper[4791]: I1007 00:12:22.999423 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:22Z","lastTransitionTime":"2025-10-07T00:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.068340 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.068421 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.068360 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:23 crc kubenswrapper[4791]: E1007 00:12:23.068496 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:23 crc kubenswrapper[4791]: E1007 00:12:23.068562 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.068437 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:23 crc kubenswrapper[4791]: E1007 00:12:23.068703 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:23 crc kubenswrapper[4791]: E1007 00:12:23.068800 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.102192 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.102239 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.102252 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.102271 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.102285 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.204735 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.204779 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.204791 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.204808 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.204820 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.307380 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.307447 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.307458 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.307480 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.307489 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.409665 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.409709 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.409720 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.409738 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.409749 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.483251 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xbjfx_8a389028-af4a-4b2c-a638-04eac9238628/kube-multus/0.log" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.483303 4791 generic.go:334] "Generic (PLEG): container finished" podID="8a389028-af4a-4b2c-a638-04eac9238628" containerID="faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053" exitCode=1 Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.483332 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xbjfx" event={"ID":"8a389028-af4a-4b2c-a638-04eac9238628","Type":"ContainerDied","Data":"faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.483710 4791 scope.go:117] "RemoveContainer" containerID="faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.495874 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.509121 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.511355 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.511381 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.511392 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.511424 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.511435 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.530143 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.546329 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.558071 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.568537 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.581853 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.592826 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.605899 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:22Z\\\",\\\"message\\\":\\\"2025-10-07T00:11:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b52646ad-a0d3-4efa-8ad5-e4d237567b54\\\\n2025-10-07T00:11:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b52646ad-a0d3-4efa-8ad5-e4d237567b54 to /host/opt/cni/bin/\\\\n2025-10-07T00:11:37Z [verbose] multus-daemon started\\\\n2025-10-07T00:11:37Z [verbose] Readiness Indicator file check\\\\n2025-10-07T00:12:22Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.614072 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.614110 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.614125 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.614143 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.614154 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.621681 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10
-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 
00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.637021 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.647495 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.658770 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 
00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.669037 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d81d0b5-a614-4621-8bef-837b8a5c631c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.680450 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.692954 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.703793 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.716840 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.716891 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.716905 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.716924 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.716937 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.721549 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:10Z\\\",\\\"message\\\":\\\"curred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z]\\\\nI1007 00:12:10.862042 6512 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-ppklr in node crc\\\\nI1007 00:12:10.862044 6512 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1007 00:12:10.862058 6512 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1007 00:12:10.861854 6512 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"e4e4203e-87c7-4024-930a-5d6bdfe2bdde\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:12:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:23Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.819290 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.819341 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.819351 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.819367 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.819378 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.922052 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.922100 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.922114 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.922131 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:23 crc kubenswrapper[4791]: I1007 00:12:23.922140 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:23Z","lastTransitionTime":"2025-10-07T00:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.024907 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.024954 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.024965 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.024983 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.024994 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.083058 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.093041 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.113896 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d81d0b5-a614-4621-8bef-837b8a5c631c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.127149 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.127188 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.127198 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.127214 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.127226 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.130101 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.146696 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.161707 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.180822 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:10Z\\\",\\\"message\\\":\\\"curred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z]\\\\nI1007 00:12:10.862042 6512 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-ppklr in node crc\\\\nI1007 00:12:10.862044 6512 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1007 00:12:10.862058 6512 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1007 00:12:10.861854 6512 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"e4e4203e-87c7-4024-930a-5d6bdfe2bdde\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:12:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.193682 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 
00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.212987 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.224890 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.229417 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.229446 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.229455 4791 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.229471 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.229480 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.236616 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.246239 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.257990 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.268325 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is 
after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.281038 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"
/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.293620 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.307012 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:22Z\\\",\\\"message\\\":\\\"2025-10-07T00:11:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b52646ad-a0d3-4efa-8ad5-e4d237567b54\\\\n2025-10-07T00:11:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b52646ad-a0d3-4efa-8ad5-e4d237567b54 to /host/opt/cni/bin/\\\\n2025-10-07T00:11:37Z [verbose] multus-daemon started\\\\n2025-10-07T00:11:37Z [verbose] Readiness Indicator file check\\\\n2025-10-07T00:12:22Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.320568 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.332367 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.332421 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc 
kubenswrapper[4791]: I1007 00:12:24.332434 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.332449 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.332486 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.434996 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.435060 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.435071 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.435086 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.435097 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.488457 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xbjfx_8a389028-af4a-4b2c-a638-04eac9238628/kube-multus/0.log" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.488512 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xbjfx" event={"ID":"8a389028-af4a-4b2c-a638-04eac9238628","Type":"ContainerStarted","Data":"102807f499ff8337104e02abb4aabfcac759e06177c81422948fda65e540df1b"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.503074 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.512019 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.522412 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.536909 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.536940 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.536950 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.536965 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.536973 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.540373 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:10Z\\\",\\\"message\\\":\\\"curred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z]\\\\nI1007 00:12:10.862042 6512 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-ppklr in node crc\\\\nI1007 00:12:10.862044 6512 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1007 00:12:10.862058 6512 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1007 00:12:10.861854 6512 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"e4e4203e-87c7-4024-930a-5d6bdfe2bdde\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:12:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.551776 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 
00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.562797 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d81d0b5-a614-4621-8bef-837b8a5c631c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.575089 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.584963 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.595484 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.603772 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.612655 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.622020 4791 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.640068 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.640106 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 
00:12:24.640115 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.640129 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.640141 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.644783 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.657439 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.672332 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.687186 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.702676 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.722231 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://102807f499ff8337104e02abb4aabfcac759e06177c81422948fda65e540df1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:22Z\\\",\\\"message\\\":\\\"2025-10-07T00:11:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b52646ad-a0d3-4efa-8ad5-e4d237567b54\\\\n2025-10-07T00:11:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b52646ad-a0d3-4efa-8ad5-e4d237567b54 to /host/opt/cni/bin/\\\\n2025-10-07T00:11:37Z [verbose] multus-daemon started\\\\n2025-10-07T00:11:37Z [verbose] Readiness Indicator file check\\\\n2025-10-07T00:12:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:12:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:24Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.742739 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.742807 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.742821 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.742850 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.742872 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.845207 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.845245 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.845255 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.845269 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.845278 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.947846 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.947892 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.947907 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.947926 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:24 crc kubenswrapper[4791]: I1007 00:12:24.947939 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:24Z","lastTransitionTime":"2025-10-07T00:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.049616 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.049668 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.049679 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.049697 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.049708 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.069244 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.069269 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:25 crc kubenswrapper[4791]: E1007 00:12:25.069379 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.069307 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:25 crc kubenswrapper[4791]: E1007 00:12:25.069540 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.069271 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.069601 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:12:25 crc kubenswrapper[4791]: E1007 00:12:25.069687 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:25 crc kubenswrapper[4791]: E1007 00:12:25.069745 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" Oct 07 00:12:25 crc kubenswrapper[4791]: E1007 00:12:25.069940 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.153119 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.153189 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.153201 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.153220 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.153234 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.257218 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.257350 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.257378 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.257457 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.257485 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.360103 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.360159 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.360174 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.360192 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.360204 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.463491 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.463541 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.463551 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.463567 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.463577 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.565800 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.565847 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.565859 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.565878 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.565893 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.668027 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.668104 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.668129 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.668161 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.668188 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.774605 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.774665 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.774680 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.774710 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.774729 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.877527 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.877588 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.877605 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.877630 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.877649 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.981017 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.981099 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.981120 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.981149 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:25 crc kubenswrapper[4791]: I1007 00:12:25.981184 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:25Z","lastTransitionTime":"2025-10-07T00:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.083789 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.083872 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.083893 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.083921 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.083943 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.186526 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.186569 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.186579 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.186597 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.186607 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.288794 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.288840 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.288854 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.288872 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.288887 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.391218 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.391254 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.391262 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.391278 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.391287 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.493899 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.493963 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.493975 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.493990 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.494020 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.596208 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.596248 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.596257 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.596274 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.596285 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.699087 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.699169 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.699191 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.699224 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.699246 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.802016 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.802061 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.802070 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.802086 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.802097 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.905136 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.905179 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.905194 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.905213 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:26 crc kubenswrapper[4791]: I1007 00:12:26.905226 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:26Z","lastTransitionTime":"2025-10-07T00:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.008334 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.008394 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.008437 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.008466 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.008483 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.068965 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.069009 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.069075 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.068972 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:27 crc kubenswrapper[4791]: E1007 00:12:27.069147 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:27 crc kubenswrapper[4791]: E1007 00:12:27.069274 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:27 crc kubenswrapper[4791]: E1007 00:12:27.069350 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:27 crc kubenswrapper[4791]: E1007 00:12:27.069478 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.112585 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.112628 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.112642 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.112665 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.112678 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.214636 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.214714 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.214723 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.214739 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.214748 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.316891 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.316938 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.316947 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.316960 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.316972 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.419877 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.419922 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.419931 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.419947 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.419958 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.522643 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.522681 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.522689 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.522705 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.522713 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.626145 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.626198 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.626211 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.626234 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.626247 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.728669 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.728713 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.728724 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.728742 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.728755 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.832236 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.832647 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.832658 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.832674 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.832686 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.935669 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.935765 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.935783 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.935839 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:27 crc kubenswrapper[4791]: I1007 00:12:27.935856 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:27Z","lastTransitionTime":"2025-10-07T00:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.038113 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.038158 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.038170 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.038185 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.038195 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.141273 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.141598 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.141626 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.141660 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.141687 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.244135 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.244168 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.244178 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.244193 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.244202 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.346781 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.346811 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.346819 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.346833 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.346844 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.449935 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.449997 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.450007 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.450023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.450033 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.552781 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.552821 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.552832 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.552851 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.552866 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.657047 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.657200 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.657232 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.657268 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.657293 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.760279 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.760326 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.760335 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.760348 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.760358 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.862156 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.862194 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.862202 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.862215 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.862224 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.964530 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.964631 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.964641 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.964657 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:28 crc kubenswrapper[4791]: I1007 00:12:28.964667 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:28Z","lastTransitionTime":"2025-10-07T00:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.067265 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.067299 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.067307 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.067321 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.067330 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.068735 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.068750 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.068802 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.068907 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:29 crc kubenswrapper[4791]: E1007 00:12:29.068969 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:29 crc kubenswrapper[4791]: E1007 00:12:29.069301 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:29 crc kubenswrapper[4791]: E1007 00:12:29.069453 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:29 crc kubenswrapper[4791]: E1007 00:12:29.069539 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.169941 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.170019 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.170033 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.170054 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.170066 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.273140 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.273192 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.273205 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.273223 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.273233 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.376544 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.376608 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.376619 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.376641 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.376665 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.478831 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.478873 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.478885 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.478904 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.478918 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.581318 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.581368 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.581379 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.581398 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.581424 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.683547 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.683601 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.683616 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.683633 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.683646 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.790198 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.790236 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.790244 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.790257 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.790266 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.892848 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.892924 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.892942 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.892970 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.892991 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.996505 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.996572 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.996595 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.996625 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:29 crc kubenswrapper[4791]: I1007 00:12:29.996646 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:29Z","lastTransitionTime":"2025-10-07T00:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.100220 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.100286 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.100300 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.100322 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.100337 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.202927 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.202962 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.202973 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.202986 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.202994 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.305712 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.305816 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.305843 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.305881 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.305913 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.408820 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.408876 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.408891 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.408908 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.408919 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.464850 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.464926 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.464943 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.464969 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.464989 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: E1007 00:12:30.481481 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:30Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.488394 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.488497 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.488517 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.488544 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.488565 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: E1007 00:12:30.511953 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:30Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.517629 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.517698 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.517719 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.517746 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.517767 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: E1007 00:12:30.539060 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:30Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.543638 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.543700 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.543718 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.543744 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.543762 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: E1007 00:12:30.560118 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:30Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.566158 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.566226 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.566244 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.566269 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.566287 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: E1007 00:12:30.588052 4791 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"900efbc9-0c53-4754-b218-ee742f01afae\\\",\\\"systemUUID\\\":\\\"30c6043d-b881-47c8-9ee1-3608625d7a75\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:30Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:30 crc kubenswrapper[4791]: E1007 00:12:30.588235 4791 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.590518 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.590557 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.590571 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.590588 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.590601 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.695264 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.695349 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.695376 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.695453 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.695484 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.799601 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.799678 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.799691 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.799714 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.799728 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.903315 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.903437 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.903461 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.903492 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:30 crc kubenswrapper[4791]: I1007 00:12:30.903515 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:30Z","lastTransitionTime":"2025-10-07T00:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.007495 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.007571 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.007589 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.007620 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.007642 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.068605 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.068679 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.068706 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:31 crc kubenswrapper[4791]: E1007 00:12:31.068803 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.068838 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:31 crc kubenswrapper[4791]: E1007 00:12:31.068930 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:31 crc kubenswrapper[4791]: E1007 00:12:31.069072 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:31 crc kubenswrapper[4791]: E1007 00:12:31.069279 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.110450 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.110539 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.110556 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.110584 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.110605 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.213149 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.213194 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.213206 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.213228 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.213241 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.316238 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.316289 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.316299 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.316317 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.316328 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.420107 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.420189 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.420202 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.420221 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.420256 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.524313 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.524363 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.524372 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.524393 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.524431 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.627507 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.627569 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.627582 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.627604 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.627618 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.730762 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.730821 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.730840 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.730868 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.730889 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.834072 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.834129 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.834140 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.834156 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.834168 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.937625 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.937690 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.937708 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.937742 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:31 crc kubenswrapper[4791]: I1007 00:12:31.937760 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:31Z","lastTransitionTime":"2025-10-07T00:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.041543 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.041631 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.041694 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.041735 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.041755 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.143926 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.143958 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.143967 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.143980 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.143989 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.246942 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.246988 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.247000 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.247017 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.247028 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.349670 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.349725 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.349734 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.349749 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.349759 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.453101 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.453162 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.453175 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.453193 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.453202 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.556048 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.556127 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.556150 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.556185 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.556216 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.659456 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.659509 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.659518 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.659536 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.659547 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.763062 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.763108 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.763118 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.763143 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.763154 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.866456 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.866492 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.866508 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.866526 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.866540 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.970015 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.970109 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.970127 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.970154 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:32 crc kubenswrapper[4791]: I1007 00:12:32.970175 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:32Z","lastTransitionTime":"2025-10-07T00:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.068672 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.068763 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.068708 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.068669 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:33 crc kubenswrapper[4791]: E1007 00:12:33.068913 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:33 crc kubenswrapper[4791]: E1007 00:12:33.069001 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:33 crc kubenswrapper[4791]: E1007 00:12:33.069105 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:33 crc kubenswrapper[4791]: E1007 00:12:33.069264 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.073469 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.073558 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.073572 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.073597 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.073608 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.177466 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.177542 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.177557 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.177581 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.177597 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.281090 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.281136 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.281146 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.281160 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.281169 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.385541 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.385643 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.385663 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.385693 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.385715 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.489282 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.489359 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.489382 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.489438 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.489458 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.592019 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.592054 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.592064 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.592079 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.592087 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.695082 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.695134 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.695145 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.695164 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.695174 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.798686 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.798770 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.798793 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.798827 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.798846 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.901769 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.901804 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.901812 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.901827 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:33 crc kubenswrapper[4791]: I1007 00:12:33.901844 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:33Z","lastTransitionTime":"2025-10-07T00:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.004920 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.004979 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.004991 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.005010 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.005022 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.090480 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69a62998-533d-402a-b7c6-8737904f4d98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1453def320cb3e2fe93e31ba10296a658c731baedf6f1f931e0af5181db435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4681d55ddd40341c482a76dbee499b7eabc40ca26c38a43eda95dcaf6d84560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4600fd8264090286f68bbadecda5d9db2e19ed1c544e740d538cbef043c51abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdf494f882d6b6e05d453216bb8ff0181594de4a7195ebf19ffa85ae9886198a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12168fb619274b24f253deeb0b24ac7cb3e83806bdd3cc72a106fa1be08dcff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a64e83194f9f527bfaf85b83f062cfc7c7dfd98c857e3431fd38a57adfa88113\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://58d1b1500fc28d73b4e990788724925845a9d42c1bd3adbff1a28c331436e71d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f7a7dcfcebc4017acae129c4b87f92efbd264646bc53175c3df3fce186f3264\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.104492 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://469c12b873677a91f5402f4f947342e5e3a7a989897beb28251c12c801ca5e1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd4e9ab6977df0ca0c39a8def6f10be33520f168667a365e85ed455edbc0e6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.107114 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.107151 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.107161 4791 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.107188 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.107201 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.119494 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.133175 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mgwcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7af405e5-f2fb-4e2e-a452-25e96e1abe40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a142348f76c39e93caebc39a41f94078538c508d46a37cb8481cce4905f3403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gl2wr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mgwcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.149368 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae232b81-12ca-4baa-ad86-96f3fbd32ac9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49008bd43573e753c552720814d6d3a21575b6a14486f0bbd3ddb7818eb17be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8lg8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h728c\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.162244 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ppklr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e16019f-8b86-49e5-a866-bb10c4c91e44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zt9k2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ppklr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is 
after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.177519 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fea7e90-d68f-4eb0-bcd7-4e302677c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad387d338a61abdd6819eae08bc4a387070f0fe0b11fc1f0119ce6685bf8141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49158deb374ca39e1db25cda4ad7c0edf0c3b612ebb474edd1a8351e16f23310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88d08b81e38f592d9079a21aa485546ffae5bb81b34b505a6133950486cf379b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"
/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://742ad7a69ff0e072afc37f7bbfa7043ed675a29f2261a4022ff5dcef23a4ea42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.194766 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.209989 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.210264 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.210333 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.210403 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.210501 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.211190 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xbjfx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a389028-af4a-4b2c-a638-04eac9238628\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://102807f499ff8337104e02abb4aabfcac759e06177c81422948fda65e540df1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:22Z\\\",\\\"message\\\":\\\"2025-10-07T00:11:37+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b52646ad-a0d3-4efa-8ad5-e4d237567b54\\\\n2025-10-07T00:11:37+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b52646ad-a0d3-4efa-8ad5-e4d237567b54 to /host/opt/cni/bin/\\\\n2025-10-07T00:11:37Z [verbose] multus-daemon started\\\\n2025-10-07T00:11:37Z [verbose] Readiness Indicator file check\\\\n2025-10-07T00:12:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:12:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w4b4d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xbjfx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.234044 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"caaf5441-9d24-4a73-9c10-a28c7278c2f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd23255e56ce86f1dccfcedf723cf2b12ba5a7f113d727897e8cd8a879be73d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc7b0e5ef9174251ce70592aaec961f8aecbe45259d1f3c86020947b7f141377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ace40310bb8310819d6d0eb657782f581c8daf5145bd92b2dbd0cc1769a0b822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30ea6c288ea39fb98fbe0fd19e35a96c286a986536ed2e5639317ba0c7096f04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e841157d55f8b99c3922a674048a1be2351e4d4377b3b0b2505cba0dbc085a11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cf0a42a803ec203bfea4466ac11e15153a3bb6c6eb3c56c9bfcdd866715593c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8112b238a999b982a0ba10f8ef89391c5c9ff0efadeff82579ebfdece020c36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bz9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q4xzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.254842 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"548a4596-dfd1-455d-ac32-54f665d69e74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64f172a3b2c718394ceb4dcce3b436e499d3d6844d879aacb80aa1a7bc88a26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1deb8f235fbfe4e4b3f617f60d87dcf6ca919894e1fe47748c8596b965eb5f1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92dbd5c589035a662b4856f6b0598f49eb11f862a2f389871d84eec3aa3d2f3c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646b40b84b6347380717e19aac450a8827c275621516a0811cd064e59b7939c0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0601a07a8b31599276b3c37f20c7b0987911c1aad270bf68ec2b6d5b1b74c966\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T00:11:27Z\\\",\\\"message\\\":\\\"W1007 00:11:17.189347 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 00:11:17.190244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759795877 cert, and key in /tmp/serving-cert-332054889/serving-signer.crt, /tmp/serving-cert-332054889/serving-signer.key\\\\nI1007 00:11:17.584749 1 observer_polling.go:159] Starting file observer\\\\nW1007 00:11:17.587169 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 00:11:17.587299 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 00:11:17.589798 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-332054889/tls.crt::/tmp/serving-cert-332054889/tls.key\\\\\\\"\\\\nF1007 00:11:27.780946 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffdec73630d5442ff208a4e01e6a90d69c70b6ce333a647ed13c520c554d55a6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://fbde24a8e02bacb2d30404dd109beb1c48cf740cc69c537b1fd2b683fb903baa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.269075 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2lpln" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1a690cd-8485-4ab6-aaca-f11c056810c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7140417a8c793f3c86f3bac8553ca61d970dfd925a99a8b3a97582dc2617590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9zcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2lpln\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.283686 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d81d0b5-a614-4621-8bef-837b8a5c631c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:12:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f15d5cf1154d171f7e12462edcb9b53d881f545c083ac5fb3334ab7451b2559d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2186ca7217fced1fc3359283d525790e52592c811c1c71faa1d1eca99c9294e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e23b288d4d32b7fdb7734780996ad0f774af3a20123538f7c86311f56044e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://719fed59d4d9abf91d4d2b4070c009adf0f632f6a12c9e51c02ed06ceade5554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.302608 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94b20234b2957601f604ca47292bfb1891a75adac8a5b9e7c9ae81264743b807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.313643 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.313687 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.313700 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.313721 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.313735 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.323156 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.336914 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c075fcf65f50ba93578d1ec71d50f77f366ccd900e6790279f0b72e7e54b4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.358873 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47547f34-4a66-4d60-8d38-af69eb320b1d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:35Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T00:12:10Z\\\",\\\"message\\\":\\\"curred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:10Z is after 2025-08-24T17:21:41Z]\\\\nI1007 00:12:10.862042 6512 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-ppklr in node crc\\\\nI1007 00:12:10.862044 6512 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1007 00:12:10.862058 6512 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1007 00:12:10.861854 6512 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"e4e4203e-87c7-4024-930a-5d6bdfe2bdde\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T00:12:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-n6cgf_openshift-ovn-kubernetes(47547f34-4a66-4d60-8d38-af69eb320b1d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T00:11:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T00:11:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-grsfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-n6cgf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.379751 4791 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efe621da-30c8-444b-9016-a08db40a94eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T00:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c19d2cc105fd387281d8446c46f986b15023eefafee848098b2d6d51e48a35f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f04fe39226f20df51d2972a54cb180a6bb40f7791efcb5f5b5e098d4c976065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T00:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5djmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T00:11:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rx5v7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T00:12:34Z is after 2025-08-24T17:21:41Z" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.416607 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.416669 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.416683 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.416704 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.416718 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.518642 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.518674 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.518683 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.518700 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.518710 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.620980 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.621023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.621032 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.621063 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.621072 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.723699 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.723737 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.723748 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.723764 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.723773 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.826955 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.827023 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.827035 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.827052 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.827063 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.930641 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.930715 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.930727 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.930742 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:34 crc kubenswrapper[4791]: I1007 00:12:34.930751 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:34Z","lastTransitionTime":"2025-10-07T00:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.033472 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.033509 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.033517 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.033530 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.033540 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.068186 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.068241 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.068360 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:35 crc kubenswrapper[4791]: E1007 00:12:35.068429 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.068552 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:35 crc kubenswrapper[4791]: E1007 00:12:35.068854 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:35 crc kubenswrapper[4791]: E1007 00:12:35.068959 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:35 crc kubenswrapper[4791]: E1007 00:12:35.069281 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.136527 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.136584 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.136599 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.136623 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.136638 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.239887 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.239953 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.239971 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.240000 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.240021 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.342173 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.342214 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.342222 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.342238 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.342248 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.446078 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.446129 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.446139 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.446165 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.446178 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.548676 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.548729 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.548740 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.548757 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.548769 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.651638 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.651687 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.651699 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.651716 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.651729 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.753734 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.753777 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.753787 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.753801 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.753811 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.855742 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.855825 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.855835 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.855852 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.855863 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.958255 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.958293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.958305 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.958318 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:35 crc kubenswrapper[4791]: I1007 00:12:35.958327 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:35Z","lastTransitionTime":"2025-10-07T00:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.061013 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.061049 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.061093 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.061109 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.061119 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.163503 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.163535 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.163543 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.163556 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.163598 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.265157 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.265199 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.265212 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.265229 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.265242 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.367264 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.367315 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.367326 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.367345 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.367354 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.469437 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.469484 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.469494 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.469510 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.469522 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.572307 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.572378 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.572397 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.572487 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.572511 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.675500 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.675592 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.675612 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.675659 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.675681 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.777501 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.777548 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.777557 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.777570 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.777580 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.879826 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.879858 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.879867 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.879880 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.879890 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.908361 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:36 crc kubenswrapper[4791]: E1007 00:12:36.908501 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:13:40.908481302 +0000 UTC m=+147.504418963 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.908563 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:36 crc kubenswrapper[4791]: E1007 00:12:36.908699 4791 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:12:36 crc kubenswrapper[4791]: E1007 00:12:36.908742 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-07 00:13:40.90873325 +0000 UTC m=+147.504670901 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.983635 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.983677 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.983686 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.983703 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:36 crc kubenswrapper[4791]: I1007 00:12:36.983713 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:36Z","lastTransitionTime":"2025-10-07T00:12:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.009027 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.009222 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.009260 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.009274 4791 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.009336 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 00:13:41.009318637 +0000 UTC m=+147.605256288 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.009868 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.009969 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.009973 4791 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.010021 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.010034 4791 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.010043 4791 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.010035 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 00:13:41.010025808 +0000 UTC m=+147.605963459 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.010089 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 00:13:41.010069309 +0000 UTC m=+147.606006960 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.069029 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.069074 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.069194 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.069224 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.069203 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.069466 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.069491 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:37 crc kubenswrapper[4791]: E1007 00:12:37.069550 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.086855 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.086893 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.086903 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.086930 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.086941 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.190445 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.190526 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.190563 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.190584 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.190596 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.293356 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.293425 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.293438 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.293457 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.293469 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.395926 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.395974 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.395984 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.395998 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.396008 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.497979 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.498027 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.498038 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.498056 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.498067 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.600652 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.600720 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.600732 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.600749 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.600762 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.704390 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.704452 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.704465 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.704484 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.704494 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.807008 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.807077 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.807093 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.807125 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.807142 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.909577 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.909618 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.909627 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.909641 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:37 crc kubenswrapper[4791]: I1007 00:12:37.909651 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:37Z","lastTransitionTime":"2025-10-07T00:12:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.012066 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.012107 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.012121 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.012143 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.012159 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.114216 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.114253 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.114264 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.114279 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.114288 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.216508 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.216766 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.216852 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.216966 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.217047 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.319818 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.319874 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.319891 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.319908 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.319918 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.422059 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.422132 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.422142 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.422156 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.422165 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.525562 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.525613 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.525626 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.525643 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.525655 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.627815 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.628319 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.628337 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.628360 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.628377 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.730966 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.731017 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.731035 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.731055 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.731068 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.833137 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.833178 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.833188 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.833203 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.833216 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.935561 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.935611 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.935620 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.935636 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:38 crc kubenswrapper[4791]: I1007 00:12:38.935646 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:38Z","lastTransitionTime":"2025-10-07T00:12:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.037973 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.038013 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.038021 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.038036 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.038046 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.068369 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.068441 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:39 crc kubenswrapper[4791]: E1007 00:12:39.068504 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.068517 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.068535 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:39 crc kubenswrapper[4791]: E1007 00:12:39.068593 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:39 crc kubenswrapper[4791]: E1007 00:12:39.068676 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:39 crc kubenswrapper[4791]: E1007 00:12:39.068851 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.140747 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.140780 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.140789 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.140803 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.140812 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.243238 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.243271 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.243281 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.243298 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.243306 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.345245 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.345276 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.345286 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.345313 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.345322 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.447372 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.447425 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.447442 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.447456 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.447468 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.549809 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.549842 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.549851 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.549864 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.549873 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.652228 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.652282 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.652293 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.652309 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.652320 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.754130 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.754171 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.754180 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.754192 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.754201 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.857825 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.858077 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.858107 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.858148 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.858183 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.961284 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.961326 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.961337 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.961361 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:39 crc kubenswrapper[4791]: I1007 00:12:39.961374 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:39Z","lastTransitionTime":"2025-10-07T00:12:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.065678 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.065730 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.065743 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.065762 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.065780 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:40Z","lastTransitionTime":"2025-10-07T00:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.069849 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.168677 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.168763 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.168783 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.168819 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.168842 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:40Z","lastTransitionTime":"2025-10-07T00:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.272357 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.272393 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.272417 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.272440 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.272453 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:40Z","lastTransitionTime":"2025-10-07T00:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.374763 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.374817 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.374835 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.374858 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.374875 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:40Z","lastTransitionTime":"2025-10-07T00:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.478226 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.478274 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.478286 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.478304 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.478316 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:40Z","lastTransitionTime":"2025-10-07T00:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.549328 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/2.log" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.552718 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerStarted","Data":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.553250 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.578107 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=62.578086618 podStartE2EDuration="1m2.578086618s" podCreationTimestamp="2025-10-07 00:11:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.577075749 +0000 UTC m=+87.173013400" watchObservedRunningTime="2025-10-07 00:12:40.578086618 +0000 UTC m=+87.174024269" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.580920 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.580966 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.580980 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.580999 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.581012 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:40Z","lastTransitionTime":"2025-10-07T00:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.591336 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.591378 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.591387 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.591448 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.591467 4791 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T00:12:40Z","lastTransitionTime":"2025-10-07T00:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.610359 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-xbjfx" podStartSLOduration=66.610343238 podStartE2EDuration="1m6.610343238s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.609979507 +0000 UTC m=+87.205917158" watchObservedRunningTime="2025-10-07 00:12:40.610343238 +0000 UTC m=+87.206280889" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.629424 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-q4xzr" podStartSLOduration=66.629384146 podStartE2EDuration="1m6.629384146s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.62742644 +0000 UTC m=+87.223364101" watchObservedRunningTime="2025-10-07 00:12:40.629384146 +0000 UTC m=+87.225321797" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.638382 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj"] Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.638815 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.640952 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.640990 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.641079 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.641265 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.656613 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=68.656588549 podStartE2EDuration="1m8.656588549s" podCreationTimestamp="2025-10-07 00:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.656073844 +0000 UTC m=+87.252011495" watchObservedRunningTime="2025-10-07 00:12:40.656588549 +0000 UTC m=+87.252526200" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.682459 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-2lpln" podStartSLOduration=66.682434943 podStartE2EDuration="1m6.682434943s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.668466031 +0000 UTC m=+87.264403682" watchObservedRunningTime="2025-10-07 00:12:40.682434943 +0000 UTC m=+87.278372594" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.682862 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=33.682855146 podStartE2EDuration="33.682855146s" podCreationTimestamp="2025-10-07 00:12:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.682008101 +0000 UTC m=+87.277945752" watchObservedRunningTime="2025-10-07 00:12:40.682855146 +0000 UTC m=+87.278792797" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.746157 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/952bb135-34db-46ba-9aff-41214ba4c7b3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.746219 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/952bb135-34db-46ba-9aff-41214ba4c7b3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.746252 4791 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/952bb135-34db-46ba-9aff-41214ba4c7b3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.746290 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/952bb135-34db-46ba-9aff-41214ba4c7b3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.746469 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/952bb135-34db-46ba-9aff-41214ba4c7b3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.772641 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podStartSLOduration=66.772619312 podStartE2EDuration="1m6.772619312s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.760950426 +0000 UTC m=+87.356888087" watchObservedRunningTime="2025-10-07 00:12:40.772619312 +0000 UTC m=+87.368556963" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.783820 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rx5v7" podStartSLOduration=65.783806794 podStartE2EDuration="1m5.783806794s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.77465527 +0000 UTC m=+87.370592921" watchObservedRunningTime="2025-10-07 00:12:40.783806794 +0000 UTC m=+87.379744445" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.809623 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=63.809602297 podStartE2EDuration="1m3.809602297s" podCreationTimestamp="2025-10-07 00:11:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.808956328 +0000 UTC m=+87.404893979" watchObservedRunningTime="2025-10-07 00:12:40.809602297 +0000 UTC m=+87.405539948" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.847973 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/952bb135-34db-46ba-9aff-41214ba4c7b3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.848034 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/952bb135-34db-46ba-9aff-41214ba4c7b3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.848077 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/952bb135-34db-46ba-9aff-41214ba4c7b3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.848120 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/952bb135-34db-46ba-9aff-41214ba4c7b3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.848154 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/952bb135-34db-46ba-9aff-41214ba4c7b3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.848387 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/952bb135-34db-46ba-9aff-41214ba4c7b3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.848491 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/952bb135-34db-46ba-9aff-41214ba4c7b3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.849342 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/952bb135-34db-46ba-9aff-41214ba4c7b3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.852704 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-mgwcn" podStartSLOduration=66.852678198 podStartE2EDuration="1m6.852678198s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.852089151 +0000 UTC m=+87.448026802" watchObservedRunningTime="2025-10-07 00:12:40.852678198 +0000 UTC m=+87.448615849" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.855724 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/952bb135-34db-46ba-9aff-41214ba4c7b3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.864896 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/952bb135-34db-46ba-9aff-41214ba4c7b3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qr4cj\" (UID: \"952bb135-34db-46ba-9aff-41214ba4c7b3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.871876 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podStartSLOduration=66.871859181 podStartE2EDuration="1m6.871859181s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:40.870227574 +0000 UTC m=+87.466165225" watchObservedRunningTime="2025-10-07 00:12:40.871859181 +0000 UTC m=+87.467796832" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.950611 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.986050 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ppklr"] Oct 07 00:12:40 crc kubenswrapper[4791]: I1007 00:12:40.986236 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:40 crc kubenswrapper[4791]: E1007 00:12:40.986350 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:41 crc kubenswrapper[4791]: I1007 00:12:41.068449 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:41 crc kubenswrapper[4791]: I1007 00:12:41.068516 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:41 crc kubenswrapper[4791]: E1007 00:12:41.069027 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:41 crc kubenswrapper[4791]: E1007 00:12:41.069123 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:41 crc kubenswrapper[4791]: I1007 00:12:41.069225 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:41 crc kubenswrapper[4791]: E1007 00:12:41.069301 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:41 crc kubenswrapper[4791]: I1007 00:12:41.557777 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" event={"ID":"952bb135-34db-46ba-9aff-41214ba4c7b3","Type":"ContainerStarted","Data":"e0202ed0923e6d77ca255ba684b5ace4c4d30beccc195129d4a6c59679235c69"} Oct 07 00:12:41 crc kubenswrapper[4791]: I1007 00:12:41.557839 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" event={"ID":"952bb135-34db-46ba-9aff-41214ba4c7b3","Type":"ContainerStarted","Data":"bb33908e52378b326906d56be6016b52555a69b3301bc76437a52c540c21d421"} Oct 07 00:12:41 crc kubenswrapper[4791]: I1007 00:12:41.577817 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qr4cj" podStartSLOduration=67.577792548 podStartE2EDuration="1m7.577792548s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:41.573063592 +0000 UTC m=+88.169001253" watchObservedRunningTime="2025-10-07 00:12:41.577792548 +0000 UTC m=+88.173730209" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.068827 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.068841 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.068865 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:43 crc kubenswrapper[4791]: E1007 00:12:43.068973 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 00:12:43 crc kubenswrapper[4791]: E1007 00:12:43.069028 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 00:12:43 crc kubenswrapper[4791]: E1007 00:12:43.069148 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.069338 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:43 crc kubenswrapper[4791]: E1007 00:12:43.069413 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ppklr" podUID="9e16019f-8b86-49e5-a866-bb10c4c91e44" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.253831 4791 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.254040 4791 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.289790 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.290419 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.290667 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-frzvc"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.291902 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.292089 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.292516 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.292577 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6hgql"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.292515 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.293593 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.293804 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.293842 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.312213 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.312222 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.312627 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.314214 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v8f7d"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.314856 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.315246 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.315265 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.315485 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.315518 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.315607 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.315721 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.316210 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.316940 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.317329 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.317515 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.317668 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.317830 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.318137 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.318203 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.319654 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-4scm9"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.320104 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4scm9" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.320433 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.320822 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.320992 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.321006 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.321130 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.321160 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.321274 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.321391 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.321649 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.321784 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.321885 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.322053 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.322553 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w9j8t"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.323285 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.323726 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zfd68"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.324446 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.325910 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29329920-xf4vz"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.326514 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.326984 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-slv5f"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.327292 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.327448 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.328028 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.334622 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.335330 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.335746 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.336159 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.336653 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.337234 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.337880 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.340300 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.340599 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pvkht"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.341423 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.343787 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.343910 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.344044 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.343805 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.343822 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.344232 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.344070 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.352969 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.353342 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.353526 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.353673 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.353810 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.353920 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.354030 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.358644 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.359079 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.359391 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.359580 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.359970 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.343910 4791 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.362033 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.362244 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.362722 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.363150 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.363432 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.363684 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.363696 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.364347 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.364542 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.364710 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.364898 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zc9tm"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.364987 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.365230 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.366161 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.370770 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpqb4"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.372535 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.373135 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.374396 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.374874 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.375067 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.375465 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.375683 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.375876 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.376022 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.376130 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.376257 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378292 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-config\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378350 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdv9q\" (UniqueName: \"kubernetes.io/projected/82b045ef-79e8-4609-b3a4-c0731df5d5d6-kube-api-access-tdv9q\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378379 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pz94\" (UniqueName: \"kubernetes.io/projected/9d0a85fd-7c96-4655-b36b-4b2e92506513-kube-api-access-4pz94\") pod 
\"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378428 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89j2v\" (UniqueName: \"kubernetes.io/projected/e44a2cb9-2537-47b2-852d-5dd31a230920-kube-api-access-89j2v\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378455 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378500 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/82b045ef-79e8-4609-b3a4-c0731df5d5d6-machine-approver-tls\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378524 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d0a85fd-7c96-4655-b36b-4b2e92506513-serving-cert\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378552 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-service-ca-bundle\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378596 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82b045ef-79e8-4609-b3a4-c0731df5d5d6-config\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378623 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-images\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378649 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e44a2cb9-2537-47b2-852d-5dd31a230920-serving-cert\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378680 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378698 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-config\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378718 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/82b045ef-79e8-4609-b3a4-c0731df5d5d6-auth-proxy-config\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.378746 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-client-ca\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.379487 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-config\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.379514 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqjwp\" (UniqueName: \"kubernetes.io/projected/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-kube-api-access-vqjwp\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.379651 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.384161 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.394240 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 
00:12:43.394432 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.394549 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.394879 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.395031 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.395602 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.395787 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.395910 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.396078 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.396179 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.396237 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.396683 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.397311 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.395513 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.405033 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.405049 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.405426 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.406597 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.407054 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.408066 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.409262 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.410286 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.411053 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.411815 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.412738 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.417667 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.424432 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-qrrrl"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.425081 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.426824 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kl5kc"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.427592 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.429526 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.430699 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.432979 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.453905 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.457582 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.458596 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.458791 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.460924 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.461158 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.462394 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.463713 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.468493 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.473325 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.475822 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.476153 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.477075 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.480071 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.481116 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.486311 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.488930 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.489541 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.489883 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.488934 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490012 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-config\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490054 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/82b045ef-79e8-4609-b3a4-c0731df5d5d6-auth-proxy-config\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490108 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-client-ca\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490136 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-config\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490168 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqjwp\" (UniqueName: \"kubernetes.io/projected/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-kube-api-access-vqjwp\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490213 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-config\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490248 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdv9q\" (UniqueName: \"kubernetes.io/projected/82b045ef-79e8-4609-b3a4-c0731df5d5d6-kube-api-access-tdv9q\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490271 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pz94\" (UniqueName: \"kubernetes.io/projected/9d0a85fd-7c96-4655-b36b-4b2e92506513-kube-api-access-4pz94\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490306 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89j2v\" (UniqueName: \"kubernetes.io/projected/e44a2cb9-2537-47b2-852d-5dd31a230920-kube-api-access-89j2v\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490328 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490368 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/82b045ef-79e8-4609-b3a4-c0731df5d5d6-machine-approver-tls\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490387 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d0a85fd-7c96-4655-b36b-4b2e92506513-serving-cert\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490419 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-service-ca-bundle\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490461 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82b045ef-79e8-4609-b3a4-c0731df5d5d6-config\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490483 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-images\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.490507 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e44a2cb9-2537-47b2-852d-5dd31a230920-serving-cert\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.491349 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sgv8c"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.491906 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.492167 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/82b045ef-79e8-4609-b3a4-c0731df5d5d6-auth-proxy-config\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.492592 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-config\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.492651 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82b045ef-79e8-4609-b3a4-c0731df5d5d6-config\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.492773 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-client-ca\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.493654 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-config\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.493781 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.494772 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.491353 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-config\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.493877 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44a2cb9-2537-47b2-852d-5dd31a230920-service-ca-bundle\") pod \"authentication-operator-69f744f599-6hgql\" (UID: 
\"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.494261 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.493836 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.495640 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.497856 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-images\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.497993 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.499046 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d0a85fd-7c96-4655-b36b-4b2e92506513-serving-cert\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.499515 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.499588 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e44a2cb9-2537-47b2-852d-5dd31a230920-serving-cert\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.500338 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.501293 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.501905 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.502294 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v5pt7"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.502846 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.503196 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.504391 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.505007 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.505066 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.508343 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.508831 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pldwt"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.508985 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/82b045ef-79e8-4609-b3a4-c0731df5d5d6-machine-approver-tls\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.509145 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.509197 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.509494 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.509699 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.511181 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-frzvc"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.511568 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.512766 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.514020 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9rh96"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.514622 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.514899 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9rh96" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.515975 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zfd68"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.516520 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-slv5f"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.519357 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v8f7d"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.519481 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kl5kc"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.520641 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w9j8t"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.521985 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.523605 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4scm9"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.523873 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.524874 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.532188 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.532362 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.533963 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6hgql"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 
00:12:43.536385 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.537695 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.541418 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pvkht"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.543197 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.548469 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.552104 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.552828 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.553318 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pldwt"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.554640 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29329920-xf4vz"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.556040 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zc9tm"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.558044 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.559062 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.560396 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.561610 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nct9w"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.565797 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-zvkmb"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.565981 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.566947 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.567035 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.567097 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.567014 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.568129 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.569163 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpqb4"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.570290 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sgv8c"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.572541 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.573623 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.573900 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.585819 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.587117 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9rh96"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.588342 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v5pt7"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.589815 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.592315 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nct9w"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.593736 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-667xs"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.594957 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-667xs" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.595074 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-667xs"] Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.612933 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.633489 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.653178 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.673903 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.692939 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.726789 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.733313 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.753093 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.773240 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.794072 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.813806 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.832875 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.852585 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.873143 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.893086 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.933352 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.953083 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 
07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.973604 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 07 00:12:43 crc kubenswrapper[4791]: I1007 00:12:43.994055 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.013008 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.032887 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.053472 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.073638 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.094522 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.113780 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.133257 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.152438 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.174198 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.193480 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.212930 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.232743 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.253605 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.273221 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.293374 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.313140 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.332170 4791 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.354494 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.373203 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.393270 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.413552 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.432740 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.452840 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.473575 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.510702 4791 request.go:700] Waited for 1.018573903s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/serviceaccounts/machine-api-operator/token Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.523451 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89j2v\" (UniqueName: \"kubernetes.io/projected/e44a2cb9-2537-47b2-852d-5dd31a230920-kube-api-access-89j2v\") pod \"authentication-operator-69f744f599-6hgql\" (UID: \"e44a2cb9-2537-47b2-852d-5dd31a230920\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.528101 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqjwp\" (UniqueName: \"kubernetes.io/projected/4ebf8d1c-8509-4eda-9f7a-d034c0cb7500-kube-api-access-vqjwp\") pod \"machine-api-operator-5694c8668f-frzvc\" (UID: \"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.532431 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.535979 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.570118 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdv9q\" (UniqueName: \"kubernetes.io/projected/82b045ef-79e8-4609-b3a4-c0731df5d5d6-kube-api-access-tdv9q\") pod \"machine-approver-56656f9798-zmzdt\" (UID: \"82b045ef-79e8-4609-b3a4-c0731df5d5d6\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.574241 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.610043 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pz94\" (UniqueName: \"kubernetes.io/projected/9d0a85fd-7c96-4655-b36b-4b2e92506513-kube-api-access-4pz94\") pod \"route-controller-manager-6576b87f9c-sw84b\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.613255 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.616844 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.628841 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.634904 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: W1007 00:12:44.647454 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82b045ef_79e8_4609_b3a4_c0731df5d5d6.slice/crio-55f386dc6071ea198bcb563a82e7b0beae0e5c7b87b176d673f868bab5155531 WatchSource:0}: Error finding container 55f386dc6071ea198bcb563a82e7b0beae0e5c7b87b176d673f868bab5155531: Status 404 returned error can't find the container with id 55f386dc6071ea198bcb563a82e7b0beae0e5c7b87b176d673f868bab5155531 Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.655077 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.673187 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.694146 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.713950 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.733788 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 07 00:12:44 crc 
kubenswrapper[4791]: I1007 00:12:44.751273 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-frzvc"] Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.754626 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.777159 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.793674 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.812175 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6hgql"] Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.812712 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.819738 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.835457 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.853463 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.873743 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.892985 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.914298 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.933183 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.958160 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.975267 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 07 00:12:44 crc kubenswrapper[4791]: I1007 00:12:44.995867 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.012483 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.014617 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b"] Oct 07 00:12:45 crc kubenswrapper[4791]: W1007 00:12:45.025240 4791 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d0a85fd_7c96_4655_b36b_4b2e92506513.slice/crio-f7caa57e113ee48027a9800c6e31ea37b14209a9dd699e407ea305e6388081ea WatchSource:0}: Error finding container f7caa57e113ee48027a9800c6e31ea37b14209a9dd699e407ea305e6388081ea: Status 404 returned error can't find the container with id f7caa57e113ee48027a9800c6e31ea37b14209a9dd699e407ea305e6388081ea Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.032456 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.052864 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.068056 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.068096 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.068619 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.068814 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.073291 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.092362 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.112841 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.134383 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.155764 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.173061 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.193701 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.212999 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.233565 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.253185 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 07 00:12:45 crc 
kubenswrapper[4791]: I1007 00:12:45.273704 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.293098 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.313548 4791 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.333697 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.353651 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.373154 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.393727 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.433618 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.452785 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.473323 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513157 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3e41faa8-02fb-41b9-9214-89769fee9994-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513189 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-config\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513207 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plql2\" (UniqueName: \"kubernetes.io/projected/a1eca433-649b-4499-b5bf-f43123f0815f-kube-api-access-plql2\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513222 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-config\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " 
pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513240 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-trusted-ca\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513258 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxrs9\" (UniqueName: \"kubernetes.io/projected/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-kube-api-access-qxrs9\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513277 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-service-ca\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513294 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txlkg\" (UniqueName: \"kubernetes.io/projected/0a9ec705-d37a-462b-b2d7-ea993046dfb4-kube-api-access-txlkg\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513313 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05472df6-c385-4574-ba3c-844fe282b74b-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513328 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-encryption-config\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513666 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0a9ec705-d37a-462b-b2d7-ea993046dfb4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513802 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.513897 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514388 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-etcd-client\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514443 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe48572b-ef54-4d09-bdc0-a14cfba2af08-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514471 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-config\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514502 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514503 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc64j\" (UniqueName: \"kubernetes.io/projected/10bc1887-f9f1-4725-9402-22c515844975-kube-api-access-tc64j\") pod \"cluster-samples-operator-665b6dd947-lp9bd\" (UID: \"10bc1887-f9f1-4725-9402-22c515844975\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514561 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514580 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8hm9\" (UniqueName: \"kubernetes.io/projected/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-kube-api-access-t8hm9\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514605 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-692z7\" (UniqueName: 
\"kubernetes.io/projected/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-kube-api-access-692z7\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514622 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-etcd-client\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514639 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-audit-dir\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514678 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/10bc1887-f9f1-4725-9402-22c515844975-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lp9bd\" (UID: \"10bc1887-f9f1-4725-9402-22c515844975\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514698 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a9ec705-d37a-462b-b2d7-ea993046dfb4-serving-cert\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514719 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514758 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwrdf\" (UniqueName: \"kubernetes.io/projected/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-kube-api-access-hwrdf\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.514975 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05472df6-c385-4574-ba3c-844fe282b74b-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515030 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-client\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515084 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515125 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515146 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-oauth-serving-cert\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515172 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-trusted-ca\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515198 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515220 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515245 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-config\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515284 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-477r5\" (UniqueName: 
\"kubernetes.io/projected/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-kube-api-access-477r5\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515329 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-console-config\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515356 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd4bw\" (UniqueName: \"kubernetes.io/projected/4cef9a77-b44f-41a4-87af-0e5230970af6-kube-api-access-vd4bw\") pod \"downloads-7954f5f757-4scm9\" (UID: \"4cef9a77-b44f-41a4-87af-0e5230970af6\") " pod="openshift-console/downloads-7954f5f757-4scm9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515383 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515489 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-service-ca\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515539 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515590 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mr5r2\" (UniqueName: \"kubernetes.io/projected/73643213-4cfb-4d70-b821-e78cc379de15-kube-api-access-mr5r2\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515767 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3e41faa8-02fb-41b9-9214-89769fee9994-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515798 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a1eca433-649b-4499-b5bf-f43123f0815f-serving-cert\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515825 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe48572b-ef54-4d09-bdc0-a14cfba2af08-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.515983 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp66q\" (UniqueName: \"kubernetes.io/projected/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-kube-api-access-vp66q\") pod \"image-pruner-29329920-xf4vz\" (UID: \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\") " pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516023 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-dir\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516058 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-ca\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516081 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516113 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/73643213-4cfb-4d70-b821-e78cc379de15-console-serving-cert\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516137 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-registry-tls\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516160 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: 
\"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516176 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe48572b-ef54-4d09-bdc0-a14cfba2af08-config\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516207 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-policies\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516238 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516299 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516320 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-etcd-serving-ca\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516365 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/73643213-4cfb-4d70-b821-e78cc379de15-console-oauth-config\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516430 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-registry-certificates\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516472 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-node-pullsecrets\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " 
pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516521 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-serviceca\") pod \"image-pruner-29329920-xf4vz\" (UID: \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\") " pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516561 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-serving-cert\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.516584 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-serving-cert\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: E1007 00:12:45.516905 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.016887059 +0000 UTC m=+92.612824710 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517056 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-image-import-ca\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517109 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517164 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxdhv\" (UniqueName: \"kubernetes.io/projected/1aa3f88f-af84-47d7-84cd-0a195a373a57-kube-api-access-sxdhv\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517390 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggmqp\" (UniqueName: \"kubernetes.io/projected/3e41faa8-02fb-41b9-9214-89769fee9994-kube-api-access-ggmqp\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517440 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-audit-dir\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517457 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517494 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-encryption-config\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517569 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517590 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-bound-sa-token\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517608 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-audit-policies\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517623 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-serving-cert\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517645 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-serving-cert\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517661 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-trusted-ca-bundle\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517701 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517767 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-client-ca\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517793 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3e41faa8-02fb-41b9-9214-89769fee9994-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517809 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-audit\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.517842 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9z7m\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-kube-api-access-h9z7m\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.533330 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.552511 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.571533 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" event={"ID":"82b045ef-79e8-4609-b3a4-c0731df5d5d6","Type":"ContainerStarted","Data":"bbd5dc28006a46b38c86cc347c3e8e0f0bfe3c6141b0845d00de9a68ed9f616f"} Oct 07 
00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.571589 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" event={"ID":"82b045ef-79e8-4609-b3a4-c0731df5d5d6","Type":"ContainerStarted","Data":"c9f5f578864d9e8330cf3edc060495de5398e2e2afb3e4bd3ed9fd3066e84221"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.571604 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" event={"ID":"82b045ef-79e8-4609-b3a4-c0731df5d5d6","Type":"ContainerStarted","Data":"55f386dc6071ea198bcb563a82e7b0beae0e5c7b87b176d673f868bab5155531"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.572425 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.572861 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" event={"ID":"e44a2cb9-2537-47b2-852d-5dd31a230920","Type":"ContainerStarted","Data":"f27f901bdd68b34b8e913d1ddc06a9c3725941dfdd0688b5d293738924916cc2"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.572911 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" event={"ID":"e44a2cb9-2537-47b2-852d-5dd31a230920","Type":"ContainerStarted","Data":"7f68ede40e593d2e88f4c89eb831192dc4fa70dcc1b1f6faa4c6b1e1b2ec561d"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.576564 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" event={"ID":"9d0a85fd-7c96-4655-b36b-4b2e92506513","Type":"ContainerStarted","Data":"6dce6e7cd8393ee1cb2e486cb375928c06ffb87d9a7929841d952bae1e6a7e69"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.576632 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" event={"ID":"9d0a85fd-7c96-4655-b36b-4b2e92506513","Type":"ContainerStarted","Data":"f7caa57e113ee48027a9800c6e31ea37b14209a9dd699e407ea305e6388081ea"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.577476 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.581224 4791 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-sw84b container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.581306 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" podUID="9d0a85fd-7c96-4655-b36b-4b2e92506513" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.581675 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" 
event={"ID":"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500","Type":"ContainerStarted","Data":"054f3ab157d2034024a7d7b988ab3c6585b980594ed7bfce872232e8fac43576"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.581729 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" event={"ID":"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500","Type":"ContainerStarted","Data":"694e8352757ac807ed3e9ed43f7218171e694cb431c7dda1e6a3939a5ccd9c1d"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.581740 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" event={"ID":"4ebf8d1c-8509-4eda-9f7a-d034c0cb7500","Type":"ContainerStarted","Data":"196a9700df8272563785331d1bd571fb898b68b4ea3e0519b7ac2068c3bf4b9f"} Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.594599 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.613325 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.619144 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.619313 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0a9ec705-d37a-462b-b2d7-ea993046dfb4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.619345 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08b78ba-2e2b-4b92-9287-24d8f62065db-config\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:45 crc kubenswrapper[4791]: E1007 00:12:45.619453 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.119384222 +0000 UTC m=+92.715321883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.619520 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4f9t\" (UniqueName: \"kubernetes.io/projected/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-kube-api-access-t4f9t\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.619589 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.619680 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0a9ec705-d37a-462b-b2d7-ea993046dfb4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620306 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620388 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bb192277-2562-4d66-9f6b-0d3a672b2c91-cert\") pod \"ingress-canary-9rh96\" (UID: \"bb192277-2562-4d66-9f6b-0d3a672b2c91\") " pod="openshift-ingress-canary/ingress-canary-9rh96" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620433 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-config\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620455 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqlzx\" (UniqueName: \"kubernetes.io/projected/03c4f1e9-30d9-4874-9fd6-f70af400d062-kube-api-access-dqlzx\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620477 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca76de13-3b68-41a3-a059-7e8f09c5d4b1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-fs5qp\" (UID: \"ca76de13-3b68-41a3-a059-7e8f09c5d4b1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620498 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc64j\" (UniqueName: \"kubernetes.io/projected/10bc1887-f9f1-4725-9402-22c515844975-kube-api-access-tc64j\") pod \"cluster-samples-operator-665b6dd947-lp9bd\" (UID: \"10bc1887-f9f1-4725-9402-22c515844975\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620515 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620534 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-mountpoint-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620551 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5nnr\" (UniqueName: \"kubernetes.io/projected/20a4713c-cab0-4783-951b-1607d1d64c1d-kube-api-access-f5nnr\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620571 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-stats-auth\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620590 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-etcd-client\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620608 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-plugins-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620628 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/10bc1887-f9f1-4725-9402-22c515844975-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lp9bd\" (UID: \"10bc1887-f9f1-4725-9402-22c515844975\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620647 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwrdf\" (UniqueName: \"kubernetes.io/projected/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-kube-api-access-hwrdf\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620675 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05472df6-c385-4574-ba3c-844fe282b74b-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620694 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-client\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620781 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620856 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-oauth-serving-cert\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620888 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/92e3eb26-afd9-4858-b403-f648c995f27e-tmpfs\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620918 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-config\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620945 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82sxq\" (UniqueName: \"kubernetes.io/projected/9fa67b2a-f495-46f3-9f72-733f45966312-kube-api-access-82sxq\") pod \"dns-operator-744455d44c-kl5kc\" (UID: \"9fa67b2a-f495-46f3-9f72-733f45966312\") " 
pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620969 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-console-config\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.620995 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cklph\" (UniqueName: \"kubernetes.io/projected/1a6878f8-306c-4cc8-bbd4-68bce70b0e02-kube-api-access-cklph\") pod \"multus-admission-controller-857f4d67dd-sgv8c\" (UID: \"1a6878f8-306c-4cc8-bbd4-68bce70b0e02\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621024 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/087fdac3-53cf-47af-b3e3-3ffae331f5de-images\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621046 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgvxw\" (UniqueName: \"kubernetes.io/projected/92e3eb26-afd9-4858-b403-f648c995f27e-kube-api-access-zgvxw\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621070 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621097 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d8e13d63-c1dd-4a68-bd18-b65592799f10-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5k9gm\" (UID: \"d8e13d63-c1dd-4a68-bd18-b65592799f10\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621125 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-service-ca\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621147 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mr5r2\" (UniqueName: \"kubernetes.io/projected/73643213-4cfb-4d70-b821-e78cc379de15-kube-api-access-mr5r2\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " 
pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621171 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1a6878f8-306c-4cc8-bbd4-68bce70b0e02-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sgv8c\" (UID: \"1a6878f8-306c-4cc8-bbd4-68bce70b0e02\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621197 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1eca433-649b-4499-b5bf-f43123f0815f-serving-cert\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621221 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe48572b-ef54-4d09-bdc0-a14cfba2af08-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621242 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp66q\" (UniqueName: \"kubernetes.io/projected/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-kube-api-access-vp66q\") pod \"image-pruner-29329920-xf4vz\" (UID: \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\") " pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621263 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-dir\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621285 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-ca\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621308 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621327 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-policies\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621350 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621373 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/73643213-4cfb-4d70-b821-e78cc379de15-console-oauth-config\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621419 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/03c4f1e9-30d9-4874-9fd6-f70af400d062-metrics-tls\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621443 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85ndm\" (UniqueName: \"kubernetes.io/projected/ca76de13-3b68-41a3-a059-7e8f09c5d4b1-kube-api-access-85ndm\") pod \"package-server-manager-789f6589d5-fs5qp\" (UID: \"ca76de13-3b68-41a3-a059-7e8f09c5d4b1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621466 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-node-pullsecrets\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621491 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsvgl\" (UniqueName: \"kubernetes.io/projected/76b01d04-5f42-4ef1-93fe-75065fa32f3f-kube-api-access-bsvgl\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621514 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-registration-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621537 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vr9t\" (UniqueName: \"kubernetes.io/projected/c532c1dd-76ac-439b-8d24-d80260c10658-kube-api-access-2vr9t\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621561 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-serviceca\") pod 
\"image-pruner-29329920-xf4vz\" (UID: \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\") " pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621583 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-serving-cert\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621611 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8a0573b-8704-46f7-8212-cead6f1911e8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621634 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfpcc\" (UniqueName: \"kubernetes.io/projected/6e45939a-f804-4553-bced-da13026cdc92-kube-api-access-kfpcc\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621654 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5aa32688-51a9-459f-84ab-9c46aacb71f5-srv-cert\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621680 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-default-certificate\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621709 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggmqp\" (UniqueName: \"kubernetes.io/projected/3e41faa8-02fb-41b9-9214-89769fee9994-kube-api-access-ggmqp\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621733 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621763 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-encryption-config\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") 
" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621786 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92e3eb26-afd9-4858-b403-f648c995f27e-webhook-cert\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621822 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-audit-policies\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621845 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-config\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621869 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/087fdac3-53cf-47af-b3e3-3ffae331f5de-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621892 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621915 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-trusted-ca-bundle\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621927 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-config\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.621936 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8a0573b-8704-46f7-8212-cead6f1911e8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:45 crc 
kubenswrapper[4791]: I1007 00:12:45.622007 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622041 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76b01d04-5f42-4ef1-93fe-75065fa32f3f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622049 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-oauth-serving-cert\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622072 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ce18217-b5e7-45ed-8343-32ac27c730a4-proxy-tls\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: \"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622097 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: \"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622121 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjfcj\" (UniqueName: \"kubernetes.io/projected/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-kube-api-access-wjfcj\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622154 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-config\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622182 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-trusted-ca\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622208 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-service-ca\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622235 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a08b78ba-2e2b-4b92-9287-24d8f62065db-serving-cert\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622262 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txlkg\" (UniqueName: \"kubernetes.io/projected/0a9ec705-d37a-462b-b2d7-ea993046dfb4-kube-api-access-txlkg\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622290 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-encryption-config\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622327 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92e3eb26-afd9-4858-b403-f648c995f27e-apiservice-cert\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622356 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622412 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-etcd-client\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622442 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe48572b-ef54-4d09-bdc0-a14cfba2af08-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622465 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8hm9\" (UniqueName: \"kubernetes.io/projected/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-kube-api-access-t8hm9\") pod \"apiserver-76f77b778f-zfd68\" (UID: 
\"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622490 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmj7n\" (UniqueName: \"kubernetes.io/projected/2ce18217-b5e7-45ed-8343-32ac27c730a4-kube-api-access-pmj7n\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: \"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622520 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-socket-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622559 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-692z7\" (UniqueName: \"kubernetes.io/projected/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-kube-api-access-692z7\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622583 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-console-config\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622587 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-audit-dir\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622622 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-audit-dir\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622632 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7zvj\" (UniqueName: \"kubernetes.io/projected/bb192277-2562-4d66-9f6b-0d3a672b2c91-kube-api-access-g7zvj\") pod \"ingress-canary-9rh96\" (UID: \"bb192277-2562-4d66-9f6b-0d3a672b2c91\") " pod="openshift-ingress-canary/ingress-canary-9rh96" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.622659 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a9ec705-d37a-462b-b2d7-ea993046dfb4-serving-cert\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623032 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623074 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623108 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d2e62b58-64d4-48d6-8e84-b2e2592671a3-srv-cert\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623127 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9fa67b2a-f495-46f3-9f72-733f45966312-metrics-tls\") pod \"dns-operator-744455d44c-kl5kc\" (UID: \"9fa67b2a-f495-46f3-9f72-733f45966312\") " pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623148 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623170 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623187 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xz95\" (UniqueName: \"kubernetes.io/projected/a08b78ba-2e2b-4b92-9287-24d8f62065db-kube-api-access-8xz95\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623210 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5aa32688-51a9-459f-84ab-9c46aacb71f5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623229 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6e45939a-f804-4553-bced-da13026cdc92-service-ca-bundle\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623256 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c532c1dd-76ac-439b-8d24-d80260c10658-config-volume\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623276 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-trusted-ca\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623296 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623320 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623337 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-477r5\" (UniqueName: \"kubernetes.io/projected/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-kube-api-access-477r5\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623353 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz7kv\" (UniqueName: \"kubernetes.io/projected/cfc06b76-a4a9-419a-9079-2509c12dec45-kube-api-access-wz7kv\") pod \"migrator-59844c95c7-ngtt4\" (UID: \"cfc06b76-a4a9-419a-9079-2509c12dec45\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623370 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c532c1dd-76ac-439b-8d24-d80260c10658-metrics-tls\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623387 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd4bw\" (UniqueName: \"kubernetes.io/projected/4cef9a77-b44f-41a4-87af-0e5230970af6-kube-api-access-vd4bw\") pod \"downloads-7954f5f757-4scm9\" (UID: \"4cef9a77-b44f-41a4-87af-0e5230970af6\") " 
pod="openshift-console/downloads-7954f5f757-4scm9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623424 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/087fdac3-53cf-47af-b3e3-3ffae331f5de-proxy-tls\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623445 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623463 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76b01d04-5f42-4ef1-93fe-75065fa32f3f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623500 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3e41faa8-02fb-41b9-9214-89769fee9994-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623517 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2fcc480a-55be-4437-b306-7d1e725dea45-signing-cabundle\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623535 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623557 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623585 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/73643213-4cfb-4d70-b821-e78cc379de15-console-serving-cert\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " 
pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623609 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623625 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-registry-tls\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623644 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe48572b-ef54-4d09-bdc0-a14cfba2af08-config\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623661 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-etcd-serving-ca\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623679 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-registry-certificates\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623698 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f5ls\" (UniqueName: \"kubernetes.io/projected/2fcc480a-55be-4437-b306-7d1e725dea45-kube-api-access-7f5ls\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623715 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k78qp\" (UniqueName: \"kubernetes.io/projected/d8e13d63-c1dd-4a68-bd18-b65592799f10-kube-api-access-k78qp\") pod \"control-plane-machine-set-operator-78cbb6b69f-5k9gm\" (UID: \"d8e13d63-c1dd-4a68-bd18-b65592799f10\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623734 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-serving-cert\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623750 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6nhq\" (UniqueName: \"kubernetes.io/projected/5aa32688-51a9-459f-84ab-9c46aacb71f5-kube-api-access-z6nhq\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623764 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x44vv\" (UniqueName: \"kubernetes.io/projected/f4083bd3-6146-4962-9aef-c7774a0c205a-kube-api-access-x44vv\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623784 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623801 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxdhv\" (UniqueName: \"kubernetes.io/projected/1aa3f88f-af84-47d7-84cd-0a195a373a57-kube-api-access-sxdhv\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623816 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-image-import-ca\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623832 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mks5\" (UniqueName: \"kubernetes.io/projected/087fdac3-53cf-47af-b3e3-3ffae331f5de-kube-api-access-4mks5\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623852 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623876 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-audit-dir\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624314 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" 
(UniqueName: \"kubernetes.io/secret/d2e62b58-64d4-48d6-8e84-b2e2592671a3-profile-collector-cert\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624341 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2fcc480a-55be-4437-b306-7d1e725dea45-signing-key\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624366 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-bound-sa-token\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624367 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-trusted-ca\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624383 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-serving-cert\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624416 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624437 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-node-bootstrap-token\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624454 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-serving-cert\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624471 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2ce18217-b5e7-45ed-8343-32ac27c730a4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: 
\"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624486 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-metrics-certs\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624529 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b9n9\" (UniqueName: \"kubernetes.io/projected/d2e62b58-64d4-48d6-8e84-b2e2592671a3-kube-api-access-4b9n9\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624546 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/20a4713c-cab0-4783-951b-1607d1d64c1d-config-volume\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624563 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-client-ca\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624580 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-csi-data-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624598 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/03c4f1e9-30d9-4874-9fd6-f70af400d062-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.624618 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9z7m\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-kube-api-access-h9z7m\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.623423 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-config\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: 
I1007 00:12:45.625066 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-service-ca\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.626022 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-audit-dir\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.626355 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe48572b-ef54-4d09-bdc0-a14cfba2af08-config\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.626646 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-image-import-ca\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.627181 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.627498 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-dir\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.627799 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.628010 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-serviceca\") pod \"image-pruner-29329920-xf4vz\" (UID: \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\") " pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.628362 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-ca\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 
00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.628362 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-audit-policies\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.628363 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.628840 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629036 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a9ec705-d37a-462b-b2d7-ea993046dfb4-serving-cert\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629157 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-etcd-serving-ca\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629248 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3e41faa8-02fb-41b9-9214-89769fee9994-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629290 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-audit\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629312 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-certs\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629332 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: 
\"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629355 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/03c4f1e9-30d9-4874-9fd6-f70af400d062-trusted-ca\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629380 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3e41faa8-02fb-41b9-9214-89769fee9994-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629421 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-config\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629444 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plql2\" (UniqueName: \"kubernetes.io/projected/a1eca433-649b-4499-b5bf-f43123f0815f-kube-api-access-plql2\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629460 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-client\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629466 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxrs9\" (UniqueName: \"kubernetes.io/projected/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-kube-api-access-qxrs9\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629517 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629522 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8a0573b-8704-46f7-8212-cead6f1911e8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629613 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-client-ca\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629745 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-trusted-ca\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629725 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-policies\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629875 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-etcd-client\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.629880 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-node-pullsecrets\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.630196 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.631182 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.631210 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-etcd-service-ca\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.631483 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/3e41faa8-02fb-41b9-9214-89769fee9994-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: E1007 00:12:45.631576 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.131552502 +0000 UTC m=+92.727490153 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.632200 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-audit\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.632966 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.633574 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-registry-certificates\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.633696 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73643213-4cfb-4d70-b821-e78cc379de15-trusted-ca-bundle\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.633875 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54cp8\" (UniqueName: \"kubernetes.io/projected/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-kube-api-access-54cp8\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: \"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.633906 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05472df6-c385-4574-ba3c-844fe282b74b-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.633926 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/20a4713c-cab0-4783-951b-1607d1d64c1d-secret-volume\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.633941 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1eca433-649b-4499-b5bf-f43123f0815f-serving-cert\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.634504 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1eca433-649b-4499-b5bf-f43123f0815f-config\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.635253 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05472df6-c385-4574-ba3c-844fe282b74b-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.635836 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.637190 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-encryption-config\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.637781 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-config\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.638299 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.638565 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/3e41faa8-02fb-41b9-9214-89769fee9994-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.639203 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-encryption-config\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.639256 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-registry-tls\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.639853 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-etcd-client\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.640538 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.641575 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/73643213-4cfb-4d70-b821-e78cc379de15-console-oauth-config\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.641594 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.641649 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-serving-cert\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.642194 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc 
kubenswrapper[4791]: I1007 00:12:45.642322 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05472df6-c385-4574-ba3c-844fe282b74b-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.642394 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe48572b-ef54-4d09-bdc0-a14cfba2af08-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.642733 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/73643213-4cfb-4d70-b821-e78cc379de15-console-serving-cert\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.642959 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/10bc1887-f9f1-4725-9402-22c515844975-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lp9bd\" (UID: \"10bc1887-f9f1-4725-9402-22c515844975\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.643106 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.643816 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-serving-cert\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.645703 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-serving-cert\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.645890 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.646693 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-serving-cert\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.669385 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc64j\" (UniqueName: \"kubernetes.io/projected/10bc1887-f9f1-4725-9402-22c515844975-kube-api-access-tc64j\") pod \"cluster-samples-operator-665b6dd947-lp9bd\" (UID: \"10bc1887-f9f1-4725-9402-22c515844975\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.690204 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwrdf\" (UniqueName: \"kubernetes.io/projected/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-kube-api-access-hwrdf\") pod \"controller-manager-879f6c89f-v8f7d\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.706580 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxdhv\" (UniqueName: \"kubernetes.io/projected/1aa3f88f-af84-47d7-84cd-0a195a373a57-kube-api-access-sxdhv\") pod \"oauth-openshift-558db77b4-pvkht\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.734704 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.734953 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6nhq\" (UniqueName: \"kubernetes.io/projected/5aa32688-51a9-459f-84ab-9c46aacb71f5-kube-api-access-z6nhq\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.734985 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x44vv\" (UniqueName: \"kubernetes.io/projected/f4083bd3-6146-4962-9aef-c7774a0c205a-kube-api-access-x44vv\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: E1007 00:12:45.735035 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.235000872 +0000 UTC m=+92.830938583 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735090 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mks5\" (UniqueName: \"kubernetes.io/projected/087fdac3-53cf-47af-b3e3-3ffae331f5de-kube-api-access-4mks5\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735131 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735193 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d2e62b58-64d4-48d6-8e84-b2e2592671a3-profile-collector-cert\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735258 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2fcc480a-55be-4437-b306-7d1e725dea45-signing-key\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735317 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-node-bootstrap-token\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735670 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggmqp\" (UniqueName: \"kubernetes.io/projected/3e41faa8-02fb-41b9-9214-89769fee9994-kube-api-access-ggmqp\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735795 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2ce18217-b5e7-45ed-8343-32ac27c730a4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: \"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735835 
4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-metrics-certs\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735861 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b9n9\" (UniqueName: \"kubernetes.io/projected/d2e62b58-64d4-48d6-8e84-b2e2592671a3-kube-api-access-4b9n9\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735901 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/20a4713c-cab0-4783-951b-1607d1d64c1d-config-volume\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735960 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-csi-data-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.735985 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/03c4f1e9-30d9-4874-9fd6-f70af400d062-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736018 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-certs\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736045 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: \"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736069 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/03c4f1e9-30d9-4874-9fd6-f70af400d062-trusted-ca\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736115 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8a0573b-8704-46f7-8212-cead6f1911e8-config\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736144 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54cp8\" (UniqueName: \"kubernetes.io/projected/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-kube-api-access-54cp8\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: \"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736189 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/20a4713c-cab0-4783-951b-1607d1d64c1d-secret-volume\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736215 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08b78ba-2e2b-4b92-9287-24d8f62065db-config\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736239 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4f9t\" (UniqueName: \"kubernetes.io/projected/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-kube-api-access-t4f9t\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736279 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bb192277-2562-4d66-9f6b-0d3a672b2c91-cert\") pod \"ingress-canary-9rh96\" (UID: \"bb192277-2562-4d66-9f6b-0d3a672b2c91\") " pod="openshift-ingress-canary/ingress-canary-9rh96" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736355 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqlzx\" (UniqueName: \"kubernetes.io/projected/03c4f1e9-30d9-4874-9fd6-f70af400d062-kube-api-access-dqlzx\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736385 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca76de13-3b68-41a3-a059-7e8f09c5d4b1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-fs5qp\" (UID: \"ca76de13-3b68-41a3-a059-7e8f09c5d4b1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736431 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-mountpoint-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " 
pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736459 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5nnr\" (UniqueName: \"kubernetes.io/projected/20a4713c-cab0-4783-951b-1607d1d64c1d-kube-api-access-f5nnr\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736486 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-stats-auth\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736525 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-plugins-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736532 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-csi-data-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736566 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/92e3eb26-afd9-4858-b403-f648c995f27e-tmpfs\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736807 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82sxq\" (UniqueName: \"kubernetes.io/projected/9fa67b2a-f495-46f3-9f72-733f45966312-kube-api-access-82sxq\") pod \"dns-operator-744455d44c-kl5kc\" (UID: \"9fa67b2a-f495-46f3-9f72-733f45966312\") " pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736833 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cklph\" (UniqueName: \"kubernetes.io/projected/1a6878f8-306c-4cc8-bbd4-68bce70b0e02-kube-api-access-cklph\") pod \"multus-admission-controller-857f4d67dd-sgv8c\" (UID: \"1a6878f8-306c-4cc8-bbd4-68bce70b0e02\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736854 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/087fdac3-53cf-47af-b3e3-3ffae331f5de-images\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736873 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgvxw\" (UniqueName: 
\"kubernetes.io/projected/92e3eb26-afd9-4858-b403-f648c995f27e-kube-api-access-zgvxw\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736897 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d8e13d63-c1dd-4a68-bd18-b65592799f10-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5k9gm\" (UID: \"d8e13d63-c1dd-4a68-bd18-b65592799f10\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736929 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1a6878f8-306c-4cc8-bbd4-68bce70b0e02-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sgv8c\" (UID: \"1a6878f8-306c-4cc8-bbd4-68bce70b0e02\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736945 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2ce18217-b5e7-45ed-8343-32ac27c730a4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: \"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.736950 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8a0573b-8704-46f7-8212-cead6f1911e8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737023 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/03c4f1e9-30d9-4874-9fd6-f70af400d062-metrics-tls\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737027 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/92e3eb26-afd9-4858-b403-f648c995f27e-tmpfs\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737048 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85ndm\" (UniqueName: \"kubernetes.io/projected/ca76de13-3b68-41a3-a059-7e8f09c5d4b1-kube-api-access-85ndm\") pod \"package-server-manager-789f6589d5-fs5qp\" (UID: \"ca76de13-3b68-41a3-a059-7e8f09c5d4b1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737073 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsvgl\" (UniqueName: 
\"kubernetes.io/projected/76b01d04-5f42-4ef1-93fe-75065fa32f3f-kube-api-access-bsvgl\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737092 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-registration-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737110 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vr9t\" (UniqueName: \"kubernetes.io/projected/c532c1dd-76ac-439b-8d24-d80260c10658-kube-api-access-2vr9t\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737147 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8a0573b-8704-46f7-8212-cead6f1911e8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737167 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfpcc\" (UniqueName: \"kubernetes.io/projected/6e45939a-f804-4553-bced-da13026cdc92-kube-api-access-kfpcc\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737186 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5aa32688-51a9-459f-84ab-9c46aacb71f5-srv-cert\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737205 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-default-certificate\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737246 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92e3eb26-afd9-4858-b403-f648c995f27e-webhook-cert\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737269 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-config\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737287 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/087fdac3-53cf-47af-b3e3-3ffae331f5de-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737302 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737321 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8a0573b-8704-46f7-8212-cead6f1911e8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737352 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/20a4713c-cab0-4783-951b-1607d1d64c1d-config-volume\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.738504 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/03c4f1e9-30d9-4874-9fd6-f70af400d062-trusted-ca\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.737362 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76b01d04-5f42-4ef1-93fe-75065fa32f3f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740641 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ce18217-b5e7-45ed-8343-32ac27c730a4-proxy-tls\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: \"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740673 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: \"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740706 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjfcj\" (UniqueName: \"kubernetes.io/projected/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-kube-api-access-wjfcj\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740758 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a08b78ba-2e2b-4b92-9287-24d8f62065db-serving-cert\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740823 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92e3eb26-afd9-4858-b403-f648c995f27e-apiservice-cert\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740894 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmj7n\" (UniqueName: \"kubernetes.io/projected/2ce18217-b5e7-45ed-8343-32ac27c730a4-kube-api-access-pmj7n\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: \"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740920 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-socket-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740942 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.740967 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7zvj\" (UniqueName: \"kubernetes.io/projected/bb192277-2562-4d66-9f6b-0d3a672b2c91-kube-api-access-g7zvj\") pod \"ingress-canary-9rh96\" (UID: \"bb192277-2562-4d66-9f6b-0d3a672b2c91\") " pod="openshift-ingress-canary/ingress-canary-9rh96" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742119 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76b01d04-5f42-4ef1-93fe-75065fa32f3f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 
00:12:45.742351 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d2e62b58-64d4-48d6-8e84-b2e2592671a3-srv-cert\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742383 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9fa67b2a-f495-46f3-9f72-733f45966312-metrics-tls\") pod \"dns-operator-744455d44c-kl5kc\" (UID: \"9fa67b2a-f495-46f3-9f72-733f45966312\") " pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742417 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742451 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xz95\" (UniqueName: \"kubernetes.io/projected/a08b78ba-2e2b-4b92-9287-24d8f62065db-kube-api-access-8xz95\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742456 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-certs\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742470 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5aa32688-51a9-459f-84ab-9c46aacb71f5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742490 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e45939a-f804-4553-bced-da13026cdc92-service-ca-bundle\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742510 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c532c1dd-76ac-439b-8d24-d80260c10658-config-volume\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742548 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz7kv\" (UniqueName: \"kubernetes.io/projected/cfc06b76-a4a9-419a-9079-2509c12dec45-kube-api-access-wz7kv\") pod \"migrator-59844c95c7-ngtt4\" (UID: 
\"cfc06b76-a4a9-419a-9079-2509c12dec45\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742567 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c532c1dd-76ac-439b-8d24-d80260c10658-metrics-tls\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742598 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/087fdac3-53cf-47af-b3e3-3ffae331f5de-proxy-tls\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742630 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76b01d04-5f42-4ef1-93fe-75065fa32f3f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742886 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2fcc480a-55be-4437-b306-7d1e725dea45-signing-cabundle\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.742980 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.743042 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.743061 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08b78ba-2e2b-4b92-9287-24d8f62065db-config\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.743092 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f5ls\" (UniqueName: \"kubernetes.io/projected/2fcc480a-55be-4437-b306-7d1e725dea45-kube-api-access-7f5ls\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 
00:12:45.743133 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k78qp\" (UniqueName: \"kubernetes.io/projected/d8e13d63-c1dd-4a68-bd18-b65592799f10-kube-api-access-k78qp\") pod \"control-plane-machine-set-operator-78cbb6b69f-5k9gm\" (UID: \"d8e13d63-c1dd-4a68-bd18-b65592799f10\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.743231 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-node-bootstrap-token\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.744042 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-metrics-certs\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.744394 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c532c1dd-76ac-439b-8d24-d80260c10658-config-volume\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.744658 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-stats-auth\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.744769 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2fcc480a-55be-4437-b306-7d1e725dea45-signing-key\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.744937 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2fcc480a-55be-4437-b306-7d1e725dea45-signing-cabundle\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.744986 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e45939a-f804-4553-bced-da13026cdc92-service-ca-bundle\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.745070 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-socket-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: 
I1007 00:12:45.743047 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6e45939a-f804-4553-bced-da13026cdc92-default-certificate\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.745318 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d2e62b58-64d4-48d6-8e84-b2e2592671a3-profile-collector-cert\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:45 crc kubenswrapper[4791]: E1007 00:12:45.745633 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.245613178 +0000 UTC m=+92.841550839 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.746594 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c532c1dd-76ac-439b-8d24-d80260c10658-metrics-tls\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.746622 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca76de13-3b68-41a3-a059-7e8f09c5d4b1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-fs5qp\" (UID: \"ca76de13-3b68-41a3-a059-7e8f09c5d4b1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.747132 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/03c4f1e9-30d9-4874-9fd6-f70af400d062-metrics-tls\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.747386 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/20a4713c-cab0-4783-951b-1607d1d64c1d-secret-volume\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.747884 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/087fdac3-53cf-47af-b3e3-3ffae331f5de-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: 
\"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.747922 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-mountpoint-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.748129 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d2e62b58-64d4-48d6-8e84-b2e2592671a3-srv-cert\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.748758 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-registration-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.748822 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/087fdac3-53cf-47af-b3e3-3ffae331f5de-proxy-tls\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.749266 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/087fdac3-53cf-47af-b3e3-3ffae331f5de-images\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.749622 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76b01d04-5f42-4ef1-93fe-75065fa32f3f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.749966 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txlkg\" (UniqueName: \"kubernetes.io/projected/0a9ec705-d37a-462b-b2d7-ea993046dfb4-kube-api-access-txlkg\") pod \"openshift-config-operator-7777fb866f-kwhf9\" (UID: \"0a9ec705-d37a-462b-b2d7-ea993046dfb4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.750201 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9fa67b2a-f495-46f3-9f72-733f45966312-metrics-tls\") pod \"dns-operator-744455d44c-kl5kc\" (UID: \"9fa67b2a-f495-46f3-9f72-733f45966312\") " pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.750316 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.750698 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f4083bd3-6146-4962-9aef-c7774a0c205a-plugins-dir\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.751143 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d8e13d63-c1dd-4a68-bd18-b65592799f10-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5k9gm\" (UID: \"d8e13d63-c1dd-4a68-bd18-b65592799f10\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.751422 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: \"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.751910 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-config\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.752299 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8a0573b-8704-46f7-8212-cead6f1911e8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.752340 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: \"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.752611 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ce18217-b5e7-45ed-8343-32ac27c730a4-proxy-tls\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: \"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.752633 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/5aa32688-51a9-459f-84ab-9c46aacb71f5-srv-cert\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.752915 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5aa32688-51a9-459f-84ab-9c46aacb71f5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.753655 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1a6878f8-306c-4cc8-bbd4-68bce70b0e02-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sgv8c\" (UID: \"1a6878f8-306c-4cc8-bbd4-68bce70b0e02\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.754885 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92e3eb26-afd9-4858-b403-f648c995f27e-apiservice-cert\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.755269 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92e3eb26-afd9-4858-b403-f648c995f27e-webhook-cert\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.756639 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a08b78ba-2e2b-4b92-9287-24d8f62065db-serving-cert\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.757505 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.759492 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bb192277-2562-4d66-9f6b-0d3a672b2c91-cert\") pod \"ingress-canary-9rh96\" (UID: \"bb192277-2562-4d66-9f6b-0d3a672b2c91\") " pod="openshift-ingress-canary/ingress-canary-9rh96" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.767352 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe48572b-ef54-4d09-bdc0-a14cfba2af08-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xrfmk\" (UID: \"fe48572b-ef54-4d09-bdc0-a14cfba2af08\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc 
kubenswrapper[4791]: I1007 00:12:45.789486 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3e41faa8-02fb-41b9-9214-89769fee9994-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pjfd2\" (UID: \"3e41faa8-02fb-41b9-9214-89769fee9994\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.815435 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp66q\" (UniqueName: \"kubernetes.io/projected/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-kube-api-access-vp66q\") pod \"image-pruner-29329920-xf4vz\" (UID: \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\") " pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.825298 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8hm9\" (UniqueName: \"kubernetes.io/projected/545d56df-b2a8-4c36-880e-bf3dfc43cf9c-kube-api-access-t8hm9\") pod \"apiserver-76f77b778f-zfd68\" (UID: \"545d56df-b2a8-4c36-880e-bf3dfc43cf9c\") " pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.843801 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:45 crc kubenswrapper[4791]: E1007 00:12:45.843890 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.343871499 +0000 UTC m=+92.939809140 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.844180 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: E1007 00:12:45.844605 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.34458963 +0000 UTC m=+92.940527281 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.847369 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.853961 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-692z7\" (UniqueName: \"kubernetes.io/projected/45f6e8b1-9956-4465-9bc4-5c4ca03f73da-kube-api-access-692z7\") pod \"apiserver-7bbb656c7d-c8gwl\" (UID: \"45f6e8b1-9956-4465-9bc4-5c4ca03f73da\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.876537 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-bound-sa-token\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.880607 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.888154 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxrs9\" (UniqueName: \"kubernetes.io/projected/0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7-kube-api-access-qxrs9\") pod \"openshift-apiserver-operator-796bbdcf4f-tr58j\" (UID: \"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.892196 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.913637 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9z7m\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-kube-api-access-h9z7m\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.927709 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mr5r2\" (UniqueName: \"kubernetes.io/projected/73643213-4cfb-4d70-b821-e78cc379de15-kube-api-access-mr5r2\") pod \"console-f9d7485db-slv5f\" (UID: \"73643213-4cfb-4d70-b821-e78cc379de15\") " pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.933111 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.944420 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.948787 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.949711 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:45 crc kubenswrapper[4791]: E1007 00:12:45.950495 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.45047473 +0000 UTC m=+93.046412381 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.955110 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.960680 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-477r5\" (UniqueName: \"kubernetes.io/projected/1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2-kube-api-access-477r5\") pod \"console-operator-58897d9998-w9j8t\" (UID: \"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2\") " pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.960742 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.985173 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.989522 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd4bw\" (UniqueName: \"kubernetes.io/projected/4cef9a77-b44f-41a4-87af-0e5230970af6-kube-api-access-vd4bw\") pod \"downloads-7954f5f757-4scm9\" (UID: \"4cef9a77-b44f-41a4-87af-0e5230970af6\") " pod="openshift-console/downloads-7954f5f757-4scm9" Oct 07 00:12:45 crc kubenswrapper[4791]: I1007 00:12:45.990675 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plql2\" (UniqueName: \"kubernetes.io/projected/a1eca433-649b-4499-b5bf-f43123f0815f-kube-api-access-plql2\") pod \"etcd-operator-b45778765-zc9tm\" (UID: \"a1eca433-649b-4499-b5bf-f43123f0815f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.030559 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mks5\" (UniqueName: \"kubernetes.io/projected/087fdac3-53cf-47af-b3e3-3ffae331f5de-kube-api-access-4mks5\") pod \"machine-config-operator-74547568cd-wkgds\" (UID: \"087fdac3-53cf-47af-b3e3-3ffae331f5de\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.051733 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.052255 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.552243432 +0000 UTC m=+93.148181083 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.056560 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6nhq\" (UniqueName: \"kubernetes.io/projected/5aa32688-51a9-459f-84ab-9c46aacb71f5-kube-api-access-z6nhq\") pod \"olm-operator-6b444d44fb-sfc8c\" (UID: \"5aa32688-51a9-459f-84ab-9c46aacb71f5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.058381 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v8f7d"] Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.067585 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x44vv\" (UniqueName: \"kubernetes.io/projected/f4083bd3-6146-4962-9aef-c7774a0c205a-kube-api-access-x44vv\") pod \"csi-hostpathplugin-nct9w\" (UID: \"f4083bd3-6146-4962-9aef-c7774a0c205a\") " pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.068097 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4scm9" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.094676 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b9n9\" (UniqueName: \"kubernetes.io/projected/d2e62b58-64d4-48d6-8e84-b2e2592671a3-kube-api-access-4b9n9\") pod \"catalog-operator-68c6474976-v95jm\" (UID: \"d2e62b58-64d4-48d6-8e84-b2e2592671a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.113182 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5nnr\" (UniqueName: \"kubernetes.io/projected/20a4713c-cab0-4783-951b-1607d1d64c1d-kube-api-access-f5nnr\") pod \"collect-profiles-29329920-jzqmv\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.113543 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.138200 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.138754 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqlzx\" (UniqueName: \"kubernetes.io/projected/03c4f1e9-30d9-4874-9fd6-f70af400d062-kube-api-access-dqlzx\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.152758 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.154225 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.654201839 +0000 UTC m=+93.250139490 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.154360 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.157911 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54cp8\" (UniqueName: \"kubernetes.io/projected/55885527-fa8a-4a03-8c8e-e0581a6d9bbd-kube-api-access-54cp8\") pod \"openshift-controller-manager-operator-756b6f6bc6-x9qxv\" (UID: \"55885527-fa8a-4a03-8c8e-e0581a6d9bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.161505 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.167875 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.179798 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.193665 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zfd68"] Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.196844 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz7kv\" (UniqueName: \"kubernetes.io/projected/cfc06b76-a4a9-419a-9079-2509c12dec45-kube-api-access-wz7kv\") pod \"migrator-59844c95c7-ngtt4\" (UID: \"cfc06b76-a4a9-419a-9079-2509c12dec45\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.202001 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/03c4f1e9-30d9-4874-9fd6-f70af400d062-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6tpbj\" (UID: \"03c4f1e9-30d9-4874-9fd6-f70af400d062\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.206723 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-nct9w" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.223343 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4f9t\" (UniqueName: \"kubernetes.io/projected/ece480d8-b3fb-43be-86cb-8156b9dc7f8c-kube-api-access-t4f9t\") pod \"machine-config-server-zvkmb\" (UID: \"ece480d8-b3fb-43be-86cb-8156b9dc7f8c\") " pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.225788 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:46 crc kubenswrapper[4791]: W1007 00:12:46.235732 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545d56df_b2a8_4c36_880e_bf3dfc43cf9c.slice/crio-75ce07ede32313c07e4a85ef15508e66cd4ae4923085f1882b0526f6a24b7e40 WatchSource:0}: Error finding container 75ce07ede32313c07e4a85ef15508e66cd4ae4923085f1882b0526f6a24b7e40: Status 404 returned error can't find the container with id 75ce07ede32313c07e4a85ef15508e66cd4ae4923085f1882b0526f6a24b7e40 Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.253822 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.254335 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.754319153 +0000 UTC m=+93.350256814 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.255729 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjfcj\" (UniqueName: \"kubernetes.io/projected/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-kube-api-access-wjfcj\") pod \"marketplace-operator-79b997595-v5pt7\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.262889 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmj7n\" (UniqueName: \"kubernetes.io/projected/2ce18217-b5e7-45ed-8343-32ac27c730a4-kube-api-access-pmj7n\") pod \"machine-config-controller-84d6567774-l5qdp\" (UID: \"2ce18217-b5e7-45ed-8343-32ac27c730a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.267809 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.282030 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k78qp\" (UniqueName: \"kubernetes.io/projected/d8e13d63-c1dd-4a68-bd18-b65592799f10-kube-api-access-k78qp\") pod \"control-plane-machine-set-operator-78cbb6b69f-5k9gm\" (UID: \"d8e13d63-c1dd-4a68-bd18-b65592799f10\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.290668 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.295126 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/193bbb8a-6ebb-48fe-9d1f-25b25e990cb6-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rxfr6\" (UID: \"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.302063 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.302933 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9"] Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.316633 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f5ls\" (UniqueName: \"kubernetes.io/projected/2fcc480a-55be-4437-b306-7d1e725dea45-kube-api-access-7f5ls\") pod \"service-ca-9c57cc56f-pldwt\" (UID: \"2fcc480a-55be-4437-b306-7d1e725dea45\") " pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.334217 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.344815 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cklph\" (UniqueName: \"kubernetes.io/projected/1a6878f8-306c-4cc8-bbd4-68bce70b0e02-kube-api-access-cklph\") pod \"multus-admission-controller-857f4d67dd-sgv8c\" (UID: \"1a6878f8-306c-4cc8-bbd4-68bce70b0e02\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.363278 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.363420 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.363842 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.863825808 +0000 UTC m=+93.459763459 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.364098 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.381030 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8a0573b-8704-46f7-8212-cead6f1911e8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c87p6\" (UID: \"d8a0573b-8704-46f7-8212-cead6f1911e8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.385000 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.391282 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.407103 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vr9t\" (UniqueName: \"kubernetes.io/projected/c532c1dd-76ac-439b-8d24-d80260c10658-kube-api-access-2vr9t\") pod \"dns-default-667xs\" (UID: \"c532c1dd-76ac-439b-8d24-d80260c10658\") " pod="openshift-dns/dns-default-667xs" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.407275 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85ndm\" (UniqueName: \"kubernetes.io/projected/ca76de13-3b68-41a3-a059-7e8f09c5d4b1-kube-api-access-85ndm\") pod \"package-server-manager-789f6589d5-fs5qp\" (UID: \"ca76de13-3b68-41a3-a059-7e8f09c5d4b1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.418046 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.429883 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29329920-xf4vz"] Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.436309 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsvgl\" (UniqueName: \"kubernetes.io/projected/76b01d04-5f42-4ef1-93fe-75065fa32f3f-kube-api-access-bsvgl\") pod \"kube-storage-version-migrator-operator-b67b599dd-wcpj8\" (UID: \"76b01d04-5f42-4ef1-93fe-75065fa32f3f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.444504 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.452481 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xz95\" (UniqueName: \"kubernetes.io/projected/a08b78ba-2e2b-4b92-9287-24d8f62065db-kube-api-access-8xz95\") pod \"service-ca-operator-777779d784-x2kmt\" (UID: \"a08b78ba-2e2b-4b92-9287-24d8f62065db\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.462495 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.464165 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7zvj\" (UniqueName: \"kubernetes.io/projected/bb192277-2562-4d66-9f6b-0d3a672b2c91-kube-api-access-g7zvj\") pod \"ingress-canary-9rh96\" (UID: \"bb192277-2562-4d66-9f6b-0d3a672b2c91\") " pod="openshift-ingress-canary/ingress-canary-9rh96" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.464575 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.464857 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:46.964843598 +0000 UTC m=+93.560781249 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.468330 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfpcc\" (UniqueName: \"kubernetes.io/projected/6e45939a-f804-4553-bced-da13026cdc92-kube-api-access-kfpcc\") pod \"router-default-5444994796-qrrrl\" (UID: \"6e45939a-f804-4553-bced-da13026cdc92\") " pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.481223 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9rh96" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.490748 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgvxw\" (UniqueName: \"kubernetes.io/projected/92e3eb26-afd9-4858-b403-f648c995f27e-kube-api-access-zgvxw\") pod \"packageserver-d55dfcdfc-g26wx\" (UID: \"92e3eb26-afd9-4858-b403-f648c995f27e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.514714 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-zvkmb" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.518750 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82sxq\" (UniqueName: \"kubernetes.io/projected/9fa67b2a-f495-46f3-9f72-733f45966312-kube-api-access-82sxq\") pod \"dns-operator-744455d44c-kl5kc\" (UID: \"9fa67b2a-f495-46f3-9f72-733f45966312\") " pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.520964 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-667xs" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.565691 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.566489 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.066463616 +0000 UTC m=+93.662401267 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.568873 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd"] Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.614583 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.615448 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2"] Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.615930 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.621644 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.656715 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.667795 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.668209 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.168192747 +0000 UTC m=+93.764130398 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.671792 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.680335 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" event={"ID":"545d56df-b2a8-4c36-880e-bf3dfc43cf9c","Type":"ContainerStarted","Data":"75ce07ede32313c07e4a85ef15508e66cd4ae4923085f1882b0526f6a24b7e40"} Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.683651 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" event={"ID":"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c","Type":"ContainerStarted","Data":"92cdde62e67b9a895a7093cf8b93d48dcdac34d8cc7e40f15e81b2982ba493cd"} Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.683682 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" event={"ID":"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c","Type":"ContainerStarted","Data":"d35222976507e135690133a2c6e12b80a5817a7865abdce64d2056e543e307c0"} Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.685965 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.691945 4791 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-v8f7d container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.692004 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" podUID="88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.692540 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" event={"ID":"0a9ec705-d37a-462b-b2d7-ea993046dfb4","Type":"ContainerStarted","Data":"4050770f908b7171be3c5c3a12dad693797f9d7ecfde4637f9e359ff323dcc9c"} Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.701420 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.707618 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29329920-xf4vz" event={"ID":"9a1fa4d9-9173-47fd-bc14-68317d5adfa4","Type":"ContainerStarted","Data":"4afdea2907d4158988ecbb6bfc28a8279fd8409e7bfc688471beb6a595c5952c"} Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.714664 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:12:46 crc kubenswrapper[4791]: W1007 00:12:46.744748 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podece480d8_b3fb_43be_86cb_8156b9dc7f8c.slice/crio-2ae334486649f6bd63f9260fd0621d17f7c5f028fa16c7dc2a2b6530412d7ca2 WatchSource:0}: Error finding container 2ae334486649f6bd63f9260fd0621d17f7c5f028fa16c7dc2a2b6530412d7ca2: Status 404 returned error can't find the container with id 2ae334486649f6bd63f9260fd0621d17f7c5f028fa16c7dc2a2b6530412d7ca2 Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.768796 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.769064 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.269028082 +0000 UTC m=+93.864965733 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.769221 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.770000 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.269990939 +0000 UTC m=+93.865928600 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.807038 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl"] Oct 07 00:12:46 crc kubenswrapper[4791]: W1007 00:12:46.844816 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e45939a_f804_4553_bced_da13026cdc92.slice/crio-f10b0ab44e783b6456ad5040afbcc135600fdafd5a143e0c8795ad16fc8fea93 WatchSource:0}: Error finding container f10b0ab44e783b6456ad5040afbcc135600fdafd5a143e0c8795ad16fc8fea93: Status 404 returned error can't find the container with id f10b0ab44e783b6456ad5040afbcc135600fdafd5a143e0c8795ad16fc8fea93 Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.871047 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.875653 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.375633533 +0000 UTC m=+93.971571174 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.884997 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4scm9"] Oct 07 00:12:46 crc kubenswrapper[4791]: W1007 00:12:46.941078 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cef9a77_b44f_41a4_87af_0e5230970af6.slice/crio-dec35f5231ccd478bb7473ec73e1b46c178ba7096709080d24fe8f686a3ee9c5 WatchSource:0}: Error finding container dec35f5231ccd478bb7473ec73e1b46c178ba7096709080d24fe8f686a3ee9c5: Status 404 returned error can't find the container with id dec35f5231ccd478bb7473ec73e1b46c178ba7096709080d24fe8f686a3ee9c5 Oct 07 00:12:46 crc kubenswrapper[4791]: I1007 00:12:46.974551 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:46 crc kubenswrapper[4791]: E1007 00:12:46.974959 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.474945294 +0000 UTC m=+94.070882935 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.076005 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 00:12:47.076563 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.576541271 +0000 UTC m=+94.172478922 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.122145 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.177490 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 00:12:47.177810 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.677800248 +0000 UTC m=+94.273737899 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.278257 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 00:12:47.278624 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.778608532 +0000 UTC m=+94.374546183 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.380008 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 00:12:47.380891 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.880876379 +0000 UTC m=+94.476814020 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.429180 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.431165 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.432941 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.445282 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" podStartSLOduration=72.445253973 podStartE2EDuration="1m12.445253973s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:47.444882653 +0000 UTC m=+94.040820304" watchObservedRunningTime="2025-10-07 00:12:47.445253973 +0000 UTC m=+94.041191624" Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.451233 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pvkht"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.492913 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 
00:12:47.493490 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:47.993476472 +0000 UTC m=+94.589414123 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.596937 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 00:12:47.598028 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.098006094 +0000 UTC m=+94.693943855 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.622993 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-6hgql" podStartSLOduration=73.622962243 podStartE2EDuration="1m13.622962243s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:47.618269748 +0000 UTC m=+94.214207399" watchObservedRunningTime="2025-10-07 00:12:47.622962243 +0000 UTC m=+94.218899894" Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.638901 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.644288 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zc9tm"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.675183 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w9j8t"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.720694 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 00:12:47.721315 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.221300106 +0000 UTC m=+94.817237757 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.723806 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.726707 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v5pt7"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.730706 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm"] Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.745905 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29329920-xf4vz" event={"ID":"9a1fa4d9-9173-47fd-bc14-68317d5adfa4","Type":"ContainerStarted","Data":"917c4f304e0849ec653d3dd1b5204c47e02863093db4b8bd70c0d0bbe00a5864"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.777015 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" event={"ID":"10bc1887-f9f1-4725-9402-22c515844975","Type":"ContainerStarted","Data":"d53dbcf24e28da93b067a03f9423ab2c8090cf5076db9b60b0125410ac596c71"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.777056 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" event={"ID":"10bc1887-f9f1-4725-9402-22c515844975","Type":"ContainerStarted","Data":"130d01fb13c8dd541d087e4fc14a5aa9aa2ead8294e5367ec66a7c474cd76f2b"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.797584 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-zvkmb" event={"ID":"ece480d8-b3fb-43be-86cb-8156b9dc7f8c","Type":"ContainerStarted","Data":"f78edc5e24d730165553fcdfc40f2d9ca18ec46fb2d7a0bead9f79f8f1b1b10e"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.797635 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-zvkmb" event={"ID":"ece480d8-b3fb-43be-86cb-8156b9dc7f8c","Type":"ContainerStarted","Data":"2ae334486649f6bd63f9260fd0621d17f7c5f028fa16c7dc2a2b6530412d7ca2"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.823811 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 00:12:47.824102 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.324063925 +0000 UTC m=+94.920001576 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.833583 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" event={"ID":"d8e13d63-c1dd-4a68-bd18-b65592799f10","Type":"ContainerStarted","Data":"677d8849aaa0515653b68596de11cd5286a3843f206a536f1e66376679222359"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.848911 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" event={"ID":"fe48572b-ef54-4d09-bdc0-a14cfba2af08","Type":"ContainerStarted","Data":"54728b620ec22dae46328c1b8b8c9e3132c69fa41c389e088bf111f7e4ec5c5c"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.850837 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" event={"ID":"5aa32688-51a9-459f-84ab-9c46aacb71f5","Type":"ContainerStarted","Data":"f5ec756e13913b79354f9d811be57fb9090dc3164d7d3616379feba27a8b08c1"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.854469 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4scm9" event={"ID":"4cef9a77-b44f-41a4-87af-0e5230970af6","Type":"ContainerStarted","Data":"6fdcd92dd887c2130de36aeac38a9b3d35e880fcffd89d42d9eec94c3809f9ff"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.854509 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4scm9" event={"ID":"4cef9a77-b44f-41a4-87af-0e5230970af6","Type":"ContainerStarted","Data":"dec35f5231ccd478bb7473ec73e1b46c178ba7096709080d24fe8f686a3ee9c5"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.856108 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-4scm9" Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.858207 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" event={"ID":"d2e62b58-64d4-48d6-8e84-b2e2592671a3","Type":"ContainerStarted","Data":"896d09bcb7c96291d03adaf9df8a4e02587746dbb5bb110021ce210de93f9ae3"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.858954 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.859021 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-4scm9 container/download-server 
namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.859045 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4scm9" podUID="4cef9a77-b44f-41a4-87af-0e5230970af6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.862172 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-w9j8t" event={"ID":"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2","Type":"ContainerStarted","Data":"0c2c269617d1f67d8e75dfbb3179c78f24a733c2983e47428aed6c429c468eaf"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.865257 4791 generic.go:334] "Generic (PLEG): container finished" podID="545d56df-b2a8-4c36-880e-bf3dfc43cf9c" containerID="846a2f58a829a556d0574606fc91fcaed9285f031b5cf5640a63eaca084fa921" exitCode=0 Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.865452 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" event={"ID":"545d56df-b2a8-4c36-880e-bf3dfc43cf9c","Type":"ContainerDied","Data":"846a2f58a829a556d0574606fc91fcaed9285f031b5cf5640a63eaca084fa921"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.868002 4791 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-v95jm container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" start-of-body= Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.868038 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" podUID="d2e62b58-64d4-48d6-8e84-b2e2592671a3" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.887354 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-frzvc" podStartSLOduration=72.887142793 podStartE2EDuration="1m12.887142793s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:47.886631728 +0000 UTC m=+94.482569379" watchObservedRunningTime="2025-10-07 00:12:47.887142793 +0000 UTC m=+94.483080444" Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.895713 4791 generic.go:334] "Generic (PLEG): container finished" podID="0a9ec705-d37a-462b-b2d7-ea993046dfb4" containerID="a97b637af127e7fc789fb8b11a93684578d643fe6d6418c62c67ee9461f5c54a" exitCode=0 Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.896104 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" event={"ID":"0a9ec705-d37a-462b-b2d7-ea993046dfb4","Type":"ContainerDied","Data":"a97b637af127e7fc789fb8b11a93684578d643fe6d6418c62c67ee9461f5c54a"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.905046 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress/router-default-5444994796-qrrrl" event={"ID":"6e45939a-f804-4553-bced-da13026cdc92","Type":"ContainerStarted","Data":"61f7da92312ca7970082f90d5cac737e50a71699184a23278ea015d59f1a051d"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.905428 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qrrrl" event={"ID":"6e45939a-f804-4553-bced-da13026cdc92","Type":"ContainerStarted","Data":"f10b0ab44e783b6456ad5040afbcc135600fdafd5a143e0c8795ad16fc8fea93"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.921042 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" event={"ID":"3e41faa8-02fb-41b9-9214-89769fee9994","Type":"ContainerStarted","Data":"2ea3691b37d35522d91f4c959437a9bb595ca4416d5c459bccefe7ce23859ae7"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.921091 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" event={"ID":"3e41faa8-02fb-41b9-9214-89769fee9994","Type":"ContainerStarted","Data":"580768081fc6a58b9deaa294c802fb548a82ff6d19a659447167f39b0b573138"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.925018 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:47 crc kubenswrapper[4791]: E1007 00:12:47.926075 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.426022983 +0000 UTC m=+95.021960694 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.958858 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" event={"ID":"82a62ffc-29d8-4597-a18c-6e13dbd2cce3","Type":"ContainerStarted","Data":"d87215121109a24119a6199338d27ae221d7228b8cd63792052870f085dedf91"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.987480 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" event={"ID":"a1eca433-649b-4499-b5bf-f43123f0815f","Type":"ContainerStarted","Data":"75b49419843436fb2a8055005cc4502f7080e6255861f18cdf7fac9e95d7affb"} Oct 07 00:12:47 crc kubenswrapper[4791]: I1007 00:12:47.995248 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" event={"ID":"20a4713c-cab0-4783-951b-1607d1d64c1d","Type":"ContainerStarted","Data":"46f69b52d7f0e24585c4506acdd72738156d8c7af9050f78d76a2c882ff6ec41"} Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.009972 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.009954721 podStartE2EDuration="1.009954721s" podCreationTimestamp="2025-10-07 00:12:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.006544202 +0000 UTC m=+94.602481863" watchObservedRunningTime="2025-10-07 00:12:48.009954721 +0000 UTC m=+94.605892382" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.017150 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9rh96"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.022281 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" event={"ID":"087fdac3-53cf-47af-b3e3-3ffae331f5de","Type":"ContainerStarted","Data":"f9f20d18159bd2ac92ea31e879ba268d1585603ac545deac8d90bbd42d5e7259"} Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.033544 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" event={"ID":"1aa3f88f-af84-47d7-84cd-0a195a373a57","Type":"ContainerStarted","Data":"1ae9a28714efb7ad3ff774aaf6bc3d998d15f9efddde55212ce1dc7400cf803a"} Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.036891 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.041331 4791 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.541312014 +0000 UTC m=+95.137249655 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.068806 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.072506 4791 generic.go:334] "Generic (PLEG): container finished" podID="45f6e8b1-9956-4465-9bc4-5c4ca03f73da" containerID="3b0780589cb009fc6459d9f588a5db1d43ee5c52f1123ec8d7c8b4f87d4b7676" exitCode=0 Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.105173 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.105222 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" event={"ID":"45f6e8b1-9956-4465-9bc4-5c4ca03f73da","Type":"ContainerDied","Data":"3b0780589cb009fc6459d9f588a5db1d43ee5c52f1123ec8d7c8b4f87d4b7676"} Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.105291 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" event={"ID":"45f6e8b1-9956-4465-9bc4-5c4ca03f73da","Type":"ContainerStarted","Data":"d44a4c58db571de97a771c0039855ab2f1aae77080324639f733117550714bbc"} Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.105365 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.145726 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.145987 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.645950049 +0000 UTC m=+95.241887700 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.146181 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.155485 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.655444722 +0000 UTC m=+95.251382373 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: W1007 00:12:48.166500 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55885527_fa8a_4a03_8c8e_e0581a6d9bbd.slice/crio-a4b12f61034eb53fc8f0d33e9b0d537ca8549c6e349d51893f75cd6e3a3c5d05 WatchSource:0}: Error finding container a4b12f61034eb53fc8f0d33e9b0d537ca8549c6e349d51893f75cd6e3a3c5d05: Status 404 returned error can't find the container with id a4b12f61034eb53fc8f0d33e9b0d537ca8549c6e349d51893f75cd6e3a3c5d05 Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.189522 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sgv8c"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.189568 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.193362 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-667xs"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.199497 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.199561 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.218735 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zmzdt" podStartSLOduration=74.218713415 podStartE2EDuration="1m14.218713415s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.214277257 +0000 UTC m=+94.810214908" watchObservedRunningTime="2025-10-07 00:12:48.218713415 +0000 UTC m=+94.814651066" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.228623 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.233448 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pldwt"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.256684 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.257187 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.757170033 +0000 UTC m=+95.353107684 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.284515 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nct9w"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.289036 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-slv5f"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.291485 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.292720 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kl5kc"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.304228 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.305078 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" podStartSLOduration=73.305044532 podStartE2EDuration="1m13.305044532s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.275011277 +0000 UTC m=+94.870948928" watchObservedRunningTime="2025-10-07 00:12:48.305044532 +0000 UTC m=+94.900982183" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.313662 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.339157 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.355749 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6"] Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.361414 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.361771 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.861757856 +0000 UTC m=+95.457695507 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.444725 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" podStartSLOduration=74.444697325 podStartE2EDuration="1m14.444697325s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.442559914 +0000 UTC m=+95.038497585" watchObservedRunningTime="2025-10-07 00:12:48.444697325 +0000 UTC m=+95.040634976" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.462806 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.463364 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:48.963331362 +0000 UTC m=+95.559269013 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.547319 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-4scm9" podStartSLOduration=73.547300631 podStartE2EDuration="1m13.547300631s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.523139645 +0000 UTC m=+95.119077306" watchObservedRunningTime="2025-10-07 00:12:48.547300631 +0000 UTC m=+95.143238282" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.566965 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.568536 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.0684277 +0000 UTC m=+95.664365351 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.588759 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-zvkmb" podStartSLOduration=5.588712094 podStartE2EDuration="5.588712094s" podCreationTimestamp="2025-10-07 00:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.547696483 +0000 UTC m=+95.143634134" watchObservedRunningTime="2025-10-07 00:12:48.588712094 +0000 UTC m=+95.184649735" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.596861 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29329920-xf4vz" podStartSLOduration=74.596838098 podStartE2EDuration="1m14.596838098s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.588444217 +0000 UTC m=+95.184381868" watchObservedRunningTime="2025-10-07 00:12:48.596838098 +0000 UTC m=+95.192775749" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.616504 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.647648 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:48 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:48 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:48 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.647708 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.668746 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.669102 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.16908011 +0000 UTC m=+95.765017761 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.686158 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pjfd2" podStartSLOduration=73.686136621 podStartE2EDuration="1m13.686136621s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.650766012 +0000 UTC m=+95.246703663" watchObservedRunningTime="2025-10-07 00:12:48.686136621 +0000 UTC m=+95.282074272" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.687572 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" podStartSLOduration=73.687567992 podStartE2EDuration="1m13.687567992s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.686631295 +0000 UTC m=+95.282568946" watchObservedRunningTime="2025-10-07 00:12:48.687567992 +0000 UTC m=+95.283505643" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.734716 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-qrrrl" podStartSLOduration=73.7346836 podStartE2EDuration="1m13.7346836s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:48.732691102 +0000 UTC m=+95.328628753" watchObservedRunningTime="2025-10-07 00:12:48.7346836 +0000 UTC m=+95.330621251" Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.771233 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.771829 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.271787299 +0000 UTC m=+95.867724950 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.872788 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.873555 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.37352662 +0000 UTC m=+95.969464281 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:48 crc kubenswrapper[4791]: I1007 00:12:48.975110 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:48 crc kubenswrapper[4791]: E1007 00:12:48.975858 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.475846677 +0000 UTC m=+96.071784328 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.079174 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.079814 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.579793722 +0000 UTC m=+96.175731373 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.079890 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.080713 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.580690568 +0000 UTC m=+96.176628219 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.143868 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" event={"ID":"55885527-fa8a-4a03-8c8e-e0581a6d9bbd","Type":"ContainerStarted","Data":"8ae868ce87ec678a07bd1f462e317cfdd195dcd7b69c77226ef756ee63e16b06"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.143936 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" event={"ID":"55885527-fa8a-4a03-8c8e-e0581a6d9bbd","Type":"ContainerStarted","Data":"a4b12f61034eb53fc8f0d33e9b0d537ca8549c6e349d51893f75cd6e3a3c5d05"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.162608 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" event={"ID":"a08b78ba-2e2b-4b92-9287-24d8f62065db","Type":"ContainerStarted","Data":"d858047e373ce56457a5359b9b4390926025235df5d3dad0361e714e57639480"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.164660 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9rh96" event={"ID":"bb192277-2562-4d66-9f6b-0d3a672b2c91","Type":"ContainerStarted","Data":"78040d6f160a387c8692abe081f02bd7420424893af42aff2f28ca57581973a6"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.164688 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9rh96" event={"ID":"bb192277-2562-4d66-9f6b-0d3a672b2c91","Type":"ContainerStarted","Data":"42f7246e4fea643f386535a67503aad763438fa1adfe98420778055ab2d9ff04"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.185227 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.185408 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.685366003 +0000 UTC m=+96.281303644 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.185498 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" event={"ID":"d8e13d63-c1dd-4a68-bd18-b65592799f10","Type":"ContainerStarted","Data":"ad472ae7e69db64d62b03cfe7e85ef7483d9cb4c88ada2e155d528d0f87f21ec"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.187932 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.189499 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.689484272 +0000 UTC m=+96.285421923 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.197352 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" event={"ID":"1aa3f88f-af84-47d7-84cd-0a195a373a57","Type":"ContainerStarted","Data":"b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.198519 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.209166 4791 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-pvkht container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.22:6443/healthz\": dial tcp 10.217.0.22:6443: connect: connection refused" start-of-body= Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.209238 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" podUID="1aa3f88f-af84-47d7-84cd-0a195a373a57" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.22:6443/healthz\": dial tcp 10.217.0.22:6443: connect: connection refused" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.227726 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nct9w" 
event={"ID":"f4083bd3-6146-4962-9aef-c7774a0c205a","Type":"ContainerStarted","Data":"fe5c117a6eb77d260e83bec2e9585520d3845abdcb362faf645481c046c640ff"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.300193 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.300507 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.800471739 +0000 UTC m=+96.396409390 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.307371 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" event={"ID":"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6","Type":"ContainerStarted","Data":"de2d7842f0dbc8ec4307aaec5f2d2de3e19424b2df2c4bca6355d72561c4f8db"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.309189 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.317439 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.817378696 +0000 UTC m=+96.413316347 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.361814 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" event={"ID":"20a4713c-cab0-4783-951b-1607d1d64c1d","Type":"ContainerStarted","Data":"418ea0752f565fba538fef2fe1725799c4d64d12e37b40a844dcce2e0b448c36"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.411355 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.411692 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:49.911675113 +0000 UTC m=+96.507612764 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.446420 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" event={"ID":"03c4f1e9-30d9-4874-9fd6-f70af400d062","Type":"ContainerStarted","Data":"2a077abe41b69b7e055353b26c9e8265cc5571ab6d69ad7d070e8c41464164a2"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.446766 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" event={"ID":"03c4f1e9-30d9-4874-9fd6-f70af400d062","Type":"ContainerStarted","Data":"e5da066ef0351fb99eebbadff9b5cd61e457c6541ae4954438789387595811bc"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.491919 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-w9j8t" event={"ID":"1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2","Type":"ContainerStarted","Data":"fd17ada1add1563ccc12b7cfdaa499d60961c0d30d28742644b98788277d58a6"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.493128 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.496467 4791 patch_prober.go:28] interesting pod/console-operator-58897d9998-w9j8t container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 
10.217.0.11:8443: connect: connection refused" start-of-body= Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.496669 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-w9j8t" podUID="1d6fbb8b-5aa2-4b54-bf45-8b9ee48048e2" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.512938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.513943 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.013927719 +0000 UTC m=+96.609865370 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.544726 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" event={"ID":"087fdac3-53cf-47af-b3e3-3ffae331f5de","Type":"ContainerStarted","Data":"dea75f14253af1c709f5da573fc24b7e8b1a3a5d52aa385a202b9e5b0a57f491"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.561132 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" event={"ID":"5aa32688-51a9-459f-84ab-9c46aacb71f5","Type":"ContainerStarted","Data":"21e39502ae94eb12883774f38dda6108cbe5c3d3081891574b537d73e22d20ec"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.562248 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.565827 4791 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-sfc8c container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" start-of-body= Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.565927 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" podUID="5aa32688-51a9-459f-84ab-9c46aacb71f5" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.617986 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.621128 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.121087526 +0000 UTC m=+96.717025177 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.636826 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" event={"ID":"fe48572b-ef54-4d09-bdc0-a14cfba2af08","Type":"ContainerStarted","Data":"7306bba6aeda7a7c9f7469b54c46c8cb83baccac3373920fa596e97347a18a6a"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.636886 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:49 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:49 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:49 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.636971 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.659824 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" event={"ID":"10bc1887-f9f1-4725-9402-22c515844975","Type":"ContainerStarted","Data":"b6860a762cbd2bfd50ba1e12cc8c96b8cb2df8b230e6a3d6b2960d3a1f326545"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.698519 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" event={"ID":"cfc06b76-a4a9-419a-9079-2509c12dec45","Type":"ContainerStarted","Data":"b3690f15d9e68b734c1ed9c9f345f641088a4c46fb90093d74185fdeb86ec2c2"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.722413 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.724699 4791 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.224683401 +0000 UTC m=+96.820621042 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.751279 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" podStartSLOduration=74.751261236 podStartE2EDuration="1m14.751261236s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:49.750936457 +0000 UTC m=+96.346874108" watchObservedRunningTime="2025-10-07 00:12:49.751261236 +0000 UTC m=+96.347198887" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.752545 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" event={"ID":"2fcc480a-55be-4437-b306-7d1e725dea45","Type":"ContainerStarted","Data":"c5fb1dbb6f48203468a1edb4cb323d70fd2d87f7947a8c2104a2ee4c8eaf05d5"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.775504 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" event={"ID":"d2e62b58-64d4-48d6-8e84-b2e2592671a3","Type":"ContainerStarted","Data":"3f2b76e25fa690a2420277d969bc8107a0b9bd49fcf3e0e2f95e8f2fa685b139"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.779067 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" event={"ID":"d8a0573b-8704-46f7-8212-cead6f1911e8","Type":"ContainerStarted","Data":"0974548b9283d4a5473e29efbfff23805245f6cdfdb66e25b465ef9735575520"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.780190 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" event={"ID":"a1eca433-649b-4499-b5bf-f43123f0815f","Type":"ContainerStarted","Data":"da7e92ae823ccb6b87a0787ee6f25b2972a5eb0f3e945adf7265a7640a1102df"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.791798 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-v95jm" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.793865 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" podStartSLOduration=74.793840023 podStartE2EDuration="1m14.793840023s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:49.783996059 +0000 UTC m=+96.379933710" watchObservedRunningTime="2025-10-07 00:12:49.793840023 +0000 UTC m=+96.389777674" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.824824 4791 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9rh96" podStartSLOduration=6.824782044 podStartE2EDuration="6.824782044s" podCreationTimestamp="2025-10-07 00:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:49.819364858 +0000 UTC m=+96.415302509" watchObservedRunningTime="2025-10-07 00:12:49.824782044 +0000 UTC m=+96.420719695" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.830232 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.830986 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-slv5f" event={"ID":"73643213-4cfb-4d70-b821-e78cc379de15","Type":"ContainerStarted","Data":"5eda2844cce8c49ddb59e124a0cce4523c2f84ebce94ad9e97bfc91a43262bba"} Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.832258 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.332241789 +0000 UTC m=+96.928179440 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.914207 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" event={"ID":"ca76de13-3b68-41a3-a059-7e8f09c5d4b1","Type":"ContainerStarted","Data":"a8e9cbeea2f44a6c883cb1426bf37a04093669645f853138b3fbd2bfcd65d298"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.915950 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5k9gm" podStartSLOduration=74.9159305 podStartE2EDuration="1m14.9159305s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:49.91177107 +0000 UTC m=+96.507708731" watchObservedRunningTime="2025-10-07 00:12:49.9159305 +0000 UTC m=+96.511868151" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.916419 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-w9j8t" podStartSLOduration=75.916395334 podStartE2EDuration="1m15.916395334s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:49.878737789 +0000 UTC m=+96.474675430" 
watchObservedRunningTime="2025-10-07 00:12:49.916395334 +0000 UTC m=+96.512332985" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.925276 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" event={"ID":"1a6878f8-306c-4cc8-bbd4-68bce70b0e02","Type":"ContainerStarted","Data":"eb1970146b86e3271daed4b33292f2e4f20a8ca2bef1c30e16479c097715837b"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.937218 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:49 crc kubenswrapper[4791]: E1007 00:12:49.939045 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.439031496 +0000 UTC m=+97.034969147 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.982212 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-x9qxv" podStartSLOduration=74.982171689 podStartE2EDuration="1m14.982171689s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:49.97527698 +0000 UTC m=+96.571214631" watchObservedRunningTime="2025-10-07 00:12:49.982171689 +0000 UTC m=+96.578109330" Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.982656 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" event={"ID":"9fa67b2a-f495-46f3-9f72-733f45966312","Type":"ContainerStarted","Data":"d4a280b081dfe37d3470728c756cd4a1b2e48706f90aaa47dffbbfb7318ed392"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.994178 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" event={"ID":"0a9ec705-d37a-462b-b2d7-ea993046dfb4","Type":"ContainerStarted","Data":"8083c2c3b154baa534cc7d6316ac6a6581f7eadb54147520e95ffb8f954c0d05"} Oct 07 00:12:49 crc kubenswrapper[4791]: I1007 00:12:49.995465 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.027814 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" podStartSLOduration=76.027798263 podStartE2EDuration="1m16.027798263s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.02735745 +0000 UTC m=+96.623295101" watchObservedRunningTime="2025-10-07 00:12:50.027798263 +0000 UTC m=+96.623735914" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.038926 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.039487 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.539461719 +0000 UTC m=+97.135399370 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.066864 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" event={"ID":"92e3eb26-afd9-4858-b403-f648c995f27e","Type":"ContainerStarted","Data":"66a57b567cb2212c01b2142e8fe103dc7a282c8bfe70f60be4e6148fc4bab174"} Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.066922 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.080193 4791 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-g26wx container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:5443/healthz\": dial tcp 10.217.0.20:5443: connect: connection refused" start-of-body= Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.080264 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" podUID="92e3eb26-afd9-4858-b403-f648c995f27e" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.20:5443/healthz\": dial tcp 10.217.0.20:5443: connect: connection refused" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.097537 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" event={"ID":"45f6e8b1-9956-4465-9bc4-5c4ca03f73da","Type":"ContainerStarted","Data":"6645a10105879edd716f2874c1cdf59c493cce9fc77f03e4540b4bf92741b445"} Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.130492 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" event={"ID":"545d56df-b2a8-4c36-880e-bf3dfc43cf9c","Type":"ContainerStarted","Data":"3eb04038e655feee08ff5b6d7c2a1d19573f300a2ba4fe2aae8f3df1eb47c9e6"} Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.140752 4791 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.144214 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.644196886 +0000 UTC m=+97.240134537 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.169341 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" event={"ID":"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7","Type":"ContainerStarted","Data":"cf82a64aee29df9ce39b03ee2875467ac35b09661bcc3df971324f36943691e0"} Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.201799 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-667xs" event={"ID":"c532c1dd-76ac-439b-8d24-d80260c10658","Type":"ContainerStarted","Data":"76733f4b04b161d0c912e0e3024d1a3ed3bd2dec539cfd842809f988c35a4466"} Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.242684 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.245036 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.74497678 +0000 UTC m=+97.340914431 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.251207 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.252988 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.75297264 +0000 UTC m=+97.348910291 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.256456 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" event={"ID":"76b01d04-5f42-4ef1-93fe-75065fa32f3f","Type":"ContainerStarted","Data":"422939e16351a4fd8d1c91719bacb32d91966296cf267dd1f6f0e2b91ff89b87"} Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.278912 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xrfmk" podStartSLOduration=75.278873846 podStartE2EDuration="1m15.278873846s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.14397829 +0000 UTC m=+96.739915941" watchObservedRunningTime="2025-10-07 00:12:50.278873846 +0000 UTC m=+96.874811497" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.280023 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lp9bd" podStartSLOduration=76.280016209 podStartE2EDuration="1m16.280016209s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.241432998 +0000 UTC m=+96.837370649" watchObservedRunningTime="2025-10-07 00:12:50.280016209 +0000 UTC m=+96.875953860" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.301300 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" 
event={"ID":"2ce18217-b5e7-45ed-8343-32ac27c730a4","Type":"ContainerStarted","Data":"4e1a543b7c9e7401a8592da355e6e091687ab797079925b764a03d059e27c7d1"} Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.327319 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" event={"ID":"82a62ffc-29d8-4597-a18c-6e13dbd2cce3","Type":"ContainerStarted","Data":"ba745c5c3cbf03040a5667e3ba681c9fa91a1db9bc39bb183444bb607f6a45b3"} Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.329171 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-4scm9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.329257 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4scm9" podUID="4cef9a77-b44f-41a4-87af-0e5230970af6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.356489 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.356915 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.856898034 +0000 UTC m=+97.452835685 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.371274 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-zc9tm" podStartSLOduration=75.371241487 podStartE2EDuration="1m15.371241487s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.355048401 +0000 UTC m=+96.950986062" watchObservedRunningTime="2025-10-07 00:12:50.371241487 +0000 UTC m=+96.967179138" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.374533 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" podStartSLOduration=75.374525152 podStartE2EDuration="1m15.374525152s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.30087764 +0000 UTC m=+96.896815291" watchObservedRunningTime="2025-10-07 00:12:50.374525152 +0000 UTC m=+96.970462803" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.459421 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.463575 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:50.963559367 +0000 UTC m=+97.559497018 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.468880 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-slv5f" podStartSLOduration=76.468855389 podStartE2EDuration="1m16.468855389s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.467062288 +0000 UTC m=+97.062999939" watchObservedRunningTime="2025-10-07 00:12:50.468855389 +0000 UTC m=+97.064793040" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.478618 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" podStartSLOduration=75.4786026 podStartE2EDuration="1m15.4786026s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.422650608 +0000 UTC m=+97.018588259" watchObservedRunningTime="2025-10-07 00:12:50.4786026 +0000 UTC m=+97.074540251" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.515163 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" podStartSLOduration=76.515136713 podStartE2EDuration="1m16.515136713s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.514636498 +0000 UTC m=+97.110574149" watchObservedRunningTime="2025-10-07 00:12:50.515136713 +0000 UTC m=+97.111074364" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.574979 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.575512 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.075488221 +0000 UTC m=+97.671425872 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.633903 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:50 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:50 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:50 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.633947 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.642312 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" podStartSLOduration=76.642296486 podStartE2EDuration="1m16.642296486s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.600262145 +0000 UTC m=+97.196199796" watchObservedRunningTime="2025-10-07 00:12:50.642296486 +0000 UTC m=+97.238234137" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.643068 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" podStartSLOduration=75.643064228 podStartE2EDuration="1m15.643064228s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.641596756 +0000 UTC m=+97.237534427" watchObservedRunningTime="2025-10-07 00:12:50.643064228 +0000 UTC m=+97.239001879" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.676437 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.676749 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.176733778 +0000 UTC m=+97.772671429 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.778009 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.778948 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.278932093 +0000 UTC m=+97.874869744 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.882256 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.882746 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.382730693 +0000 UTC m=+97.978668344 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.956638 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.956714 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:50 crc kubenswrapper[4791]: I1007 00:12:50.983336 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:50 crc kubenswrapper[4791]: E1007 00:12:50.983868 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.483844436 +0000 UTC m=+98.079782087 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.085711 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.086225 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.586199955 +0000 UTC m=+98.182137606 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.187224 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.188171 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.688154012 +0000 UTC m=+98.284091663 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.288856 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.289288 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.789266595 +0000 UTC m=+98.385204426 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.291067 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" podStartSLOduration=76.291049996 podStartE2EDuration="1m16.291049996s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:50.676969815 +0000 UTC m=+97.272907476" watchObservedRunningTime="2025-10-07 00:12:51.291049996 +0000 UTC m=+97.886987647" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.294009 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pmlfq"] Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.295220 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.298272 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.316649 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pmlfq"] Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.345625 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" event={"ID":"92e3eb26-afd9-4858-b403-f648c995f27e","Type":"ContainerStarted","Data":"dd29e13a7d2f8de1a1e9e46c38af5a1a4a04806044c5d2af18a3d1fb3d939d75"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.347023 4791 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-g26wx container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:5443/healthz\": dial tcp 10.217.0.20:5443: connect: connection refused" start-of-body= Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.347129 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" podUID="92e3eb26-afd9-4858-b403-f648c995f27e" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.20:5443/healthz\": dial tcp 10.217.0.20:5443: connect: connection refused" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.348822 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" event={"ID":"9fa67b2a-f495-46f3-9f72-733f45966312","Type":"ContainerStarted","Data":"4b5d4eabf19aacf4a822b7b9895ea993f6c4fcd818145cec9cccc874795c7741"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.348887 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" event={"ID":"9fa67b2a-f495-46f3-9f72-733f45966312","Type":"ContainerStarted","Data":"74c4101914b243036818a481f9313eb1bd4ad0fa327568a616263a18fd3b419d"} Oct 07 00:12:51 crc 
kubenswrapper[4791]: I1007 00:12:51.350487 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" event={"ID":"ca76de13-3b68-41a3-a059-7e8f09c5d4b1","Type":"ContainerStarted","Data":"289f6d6bb5c2a437f080cddba80b3533120b977371d321cba4b97ab871958155"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.350526 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" event={"ID":"ca76de13-3b68-41a3-a059-7e8f09c5d4b1","Type":"ContainerStarted","Data":"8872eb04a17562c28b09cac141167a9ace344d82f569d4da56d54d78f550296d"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.350587 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.352384 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" event={"ID":"76b01d04-5f42-4ef1-93fe-75065fa32f3f","Type":"ContainerStarted","Data":"18b8da26d9c125a9124ccba066593a55cbc2f353467ef8bb019a2de9b231827b"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.376534 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" event={"ID":"03c4f1e9-30d9-4874-9fd6-f70af400d062","Type":"ContainerStarted","Data":"6a70ce2739a83c9d0f03f89a35e29c7158dca35d5cef4a17b91ebc0558d990b7"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.388995 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wkgds" event={"ID":"087fdac3-53cf-47af-b3e3-3ffae331f5de","Type":"ContainerStarted","Data":"bb12ec99bafacb236c826448d480f024cbaf3a737c7eca8d762ab90dda04f9c5"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.391042 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.391559 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7cjc\" (UniqueName: \"kubernetes.io/projected/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-kube-api-access-t7cjc\") pod \"community-operators-pmlfq\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.391891 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-catalog-content\") pod \"community-operators-pmlfq\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.392212 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-utilities\") pod \"community-operators-pmlfq\" (UID: 
\"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.397745 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.897721258 +0000 UTC m=+98.493658909 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.400074 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-kl5kc" podStartSLOduration=76.400038625 podStartE2EDuration="1m16.400038625s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.389521523 +0000 UTC m=+97.985459174" watchObservedRunningTime="2025-10-07 00:12:51.400038625 +0000 UTC m=+97.995976276" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.425862 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" event={"ID":"1a6878f8-306c-4cc8-bbd4-68bce70b0e02","Type":"ContainerStarted","Data":"92010d72d7ae5921789b4c001b3222dc7f06313ce7c00da89004bf0f03be79ca"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.425916 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" event={"ID":"1a6878f8-306c-4cc8-bbd4-68bce70b0e02","Type":"ContainerStarted","Data":"7e3c0cf4cf88136c504eae50f49dd3f60bb4d933677251b0e7b00689f883f45b"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.430598 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" podStartSLOduration=76.430583065 podStartE2EDuration="1m16.430583065s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.427593839 +0000 UTC m=+98.023531490" watchObservedRunningTime="2025-10-07 00:12:51.430583065 +0000 UTC m=+98.026520716" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.449598 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-667xs" event={"ID":"c532c1dd-76ac-439b-8d24-d80260c10658","Type":"ContainerStarted","Data":"a7a5f0b867a8b790ed6167ba184a8d0a09d438f58f221871bf777b275ef5bb18"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.449666 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-667xs" event={"ID":"c532c1dd-76ac-439b-8d24-d80260c10658","Type":"ContainerStarted","Data":"1fb949849c2e07c801266da9a8f1e6b6e434ebac4874a4f179a33f7ad79670c8"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.449873 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-dns/dns-default-667xs" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.473115 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t7lmb"] Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.474161 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wcpj8" podStartSLOduration=76.4741291 podStartE2EDuration="1m16.4741291s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.468352123 +0000 UTC m=+98.064289774" watchObservedRunningTime="2025-10-07 00:12:51.4741291 +0000 UTC m=+98.070066751" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.474251 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.474992 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" event={"ID":"a08b78ba-2e2b-4b92-9287-24d8f62065db","Type":"ContainerStarted","Data":"0a69d7a493d9f4c2e677559b99b97d29c272223f4b5ff1483e02385edf40fb73"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.484942 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.493170 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-catalog-content\") pod \"community-operators-pmlfq\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.493305 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-utilities\") pod \"community-operators-pmlfq\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.493362 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7cjc\" (UniqueName: \"kubernetes.io/projected/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-kube-api-access-t7cjc\") pod \"community-operators-pmlfq\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.493463 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.495254 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-catalog-content\") pod \"community-operators-pmlfq\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " 
pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.496033 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-utilities\") pod \"community-operators-pmlfq\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.497943 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:51.997448091 +0000 UTC m=+98.593385742 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.502871 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t7lmb"] Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.519692 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" event={"ID":"2ce18217-b5e7-45ed-8343-32ac27c730a4","Type":"ContainerStarted","Data":"54679e8668d8017e330bec5eeb9a251d85e5895de5d8f2c26de3636a9226659d"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.519751 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" event={"ID":"2ce18217-b5e7-45ed-8343-32ac27c730a4","Type":"ContainerStarted","Data":"387f75ec10355d8184224b1710b7a630009863a7b4136984b6f4b781fbdb1777"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.521656 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6tpbj" podStartSLOduration=76.521634468 podStartE2EDuration="1m16.521634468s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.492799057 +0000 UTC m=+98.088736708" watchObservedRunningTime="2025-10-07 00:12:51.521634468 +0000 UTC m=+98.117572129" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.570824 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7cjc\" (UniqueName: \"kubernetes.io/projected/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-kube-api-access-t7cjc\") pod \"community-operators-pmlfq\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.571535 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-slv5f" event={"ID":"73643213-4cfb-4d70-b821-e78cc379de15","Type":"ContainerStarted","Data":"31bc2f8cd829c1aed1ae1d7d6aa5b02015dc42c0b58063b433872133839da313"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.593600 4791 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-sgv8c" podStartSLOduration=76.593578321 podStartE2EDuration="1m16.593578321s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.542950382 +0000 UTC m=+98.138888043" watchObservedRunningTime="2025-10-07 00:12:51.593578321 +0000 UTC m=+98.189515982" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.608956 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.609161 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpd86\" (UniqueName: \"kubernetes.io/projected/969bde5b-aa90-48e4-9352-76feaaabdd8e-kube-api-access-kpd86\") pod \"certified-operators-t7lmb\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.609287 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-utilities\") pod \"certified-operators-t7lmb\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.609366 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-catalog-content\") pod \"certified-operators-t7lmb\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.609947 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.109926002 +0000 UTC m=+98.705863653 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.620771 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" event={"ID":"545d56df-b2a8-4c36-880e-bf3dfc43cf9c","Type":"ContainerStarted","Data":"8bcd8e3dc4b5d762623faf4df3a36bbd2fb62f7d26ce2c8e965810d2f4d6a213"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.621504 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.648147 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5qdp" podStartSLOduration=76.648128512 podStartE2EDuration="1m16.648128512s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.608337356 +0000 UTC m=+98.204275017" watchObservedRunningTime="2025-10-07 00:12:51.648128512 +0000 UTC m=+98.244066163" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.650656 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:51 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:51 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:51 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.650698 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.666737 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mpd57"] Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.667471 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-667xs" podStartSLOduration=8.667454639 podStartE2EDuration="8.667454639s" podCreationTimestamp="2025-10-07 00:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.651480289 +0000 UTC m=+98.247417960" watchObservedRunningTime="2025-10-07 00:12:51.667454639 +0000 UTC m=+98.263392290" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.667794 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.672857 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" event={"ID":"d8a0573b-8704-46f7-8212-cead6f1911e8","Type":"ContainerStarted","Data":"b9c73cd6df6e39067ee06f9628c957e0087c32ae69a947cea2fddb9d18b372d5"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.683704 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mpd57"] Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.685549 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x2kmt" podStartSLOduration=76.68553355 podStartE2EDuration="1m16.68553355s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.684076968 +0000 UTC m=+98.280014619" watchObservedRunningTime="2025-10-07 00:12:51.68553355 +0000 UTC m=+98.281471201" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.706299 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tr58j" event={"ID":"0e5ed7bf-bb84-4b52-8e55-5c5452ae69c7","Type":"ContainerStarted","Data":"cbd7d5efa0a4ec9ada84045d7efd075ad4e9e74cb5c99e0ceb94329506492b06"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.710223 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-utilities\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.710264 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rtmb\" (UniqueName: \"kubernetes.io/projected/da2f49ee-105d-43d7-82f3-4735f1693e90-kube-api-access-8rtmb\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.710320 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-catalog-content\") pod \"certified-operators-t7lmb\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.710343 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-catalog-content\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.710376 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpd86\" (UniqueName: \"kubernetes.io/projected/969bde5b-aa90-48e4-9352-76feaaabdd8e-kube-api-access-kpd86\") pod \"certified-operators-t7lmb\" (UID: 
\"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.710542 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-utilities\") pod \"certified-operators-t7lmb\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.710568 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.713376 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-catalog-content\") pod \"certified-operators-t7lmb\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.718918 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.218898191 +0000 UTC m=+98.814836032 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.719392 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-utilities\") pod \"certified-operators-t7lmb\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.740801 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" event={"ID":"cfc06b76-a4a9-419a-9079-2509c12dec45","Type":"ContainerStarted","Data":"674f0f18ad1455359f660bd0b3794282be863fce7749228000bd48688fe993d3"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.740867 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" event={"ID":"cfc06b76-a4a9-419a-9079-2509c12dec45","Type":"ContainerStarted","Data":"8d4e6b701325d3a618c8486dfbebf863c6af662cdf386317274bbea0a17796fe"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.760653 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpd86\" (UniqueName: \"kubernetes.io/projected/969bde5b-aa90-48e4-9352-76feaaabdd8e-kube-api-access-kpd86\") pod \"certified-operators-t7lmb\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " 
pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.776091 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-pldwt" event={"ID":"2fcc480a-55be-4437-b306-7d1e725dea45","Type":"ContainerStarted","Data":"e8c552f3f0ce41ca7b72cc195ed85efe75b62454208ce2e0a80c68433cfb2f08"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.789931 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" event={"ID":"193bbb8a-6ebb-48fe-9d1f-25b25e990cb6","Type":"ContainerStarted","Data":"6f1a49405f61067488bebec83862537a600a65fab9deb92d494ab7adb1d6a125"} Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.792666 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.795717 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.797131 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-4scm9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.797169 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4scm9" podUID="4cef9a77-b44f-41a4-87af-0e5230970af6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.799459 4791 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-v5pt7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.799518 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" podUID="82a62ffc-29d8-4597-a18c-6e13dbd2cce3" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.816637 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" podStartSLOduration=77.816612456 podStartE2EDuration="1m17.816612456s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:51.809625095 +0000 UTC m=+98.405562746" watchObservedRunningTime="2025-10-07 00:12:51.816612456 +0000 UTC m=+98.412550107" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.819396 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.819791 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-utilities\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.819879 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rtmb\" (UniqueName: \"kubernetes.io/projected/da2f49ee-105d-43d7-82f3-4735f1693e90-kube-api-access-8rtmb\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.819955 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-catalog-content\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.821067 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.321043434 +0000 UTC m=+98.916981085 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.821244 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-utilities\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.821616 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-catalog-content\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.822444 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kwhf9" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.846068 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sfc8c" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.887936 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.922128 4791 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.927062 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rtmb\" (UniqueName: \"kubernetes.io/projected/da2f49ee-105d-43d7-82f3-4735f1693e90-kube-api-access-8rtmb\") pod \"community-operators-mpd57\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:51 crc kubenswrapper[4791]: E1007 00:12:51.928819 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.428798488 +0000 UTC m=+99.024736139 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:51 crc kubenswrapper[4791]: I1007 00:12:51.979157 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-w9j8t" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.007796 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k2bmb"] Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.025470 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.026792 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.027277 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.527235564 +0000 UTC m=+99.123173205 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.027392 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.030308 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k2bmb"] Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.030936 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.53091 +0000 UTC m=+99.126847651 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.034213 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.138069 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.138271 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-catalog-content\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.138311 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzwdf\" (UniqueName: \"kubernetes.io/projected/adc81096-c228-44a9-ad8d-befc0fd73127-kube-api-access-zzwdf\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.138346 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-utilities\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.138511 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.638480609 +0000 UTC m=+99.234418260 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.163915 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ngtt4" podStartSLOduration=77.163888401 podStartE2EDuration="1m17.163888401s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:52.072171459 +0000 UTC m=+98.668109120" watchObservedRunningTime="2025-10-07 00:12:52.163888401 +0000 UTC m=+98.759826052" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.239361 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.239461 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-catalog-content\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.239521 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzwdf\" (UniqueName: \"kubernetes.io/projected/adc81096-c228-44a9-ad8d-befc0fd73127-kube-api-access-zzwdf\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.239586 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-utilities\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.240313 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.740297032 +0000 UTC m=+99.336234673 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.240729 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-utilities\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.241464 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-catalog-content\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.265222 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c87p6" podStartSLOduration=77.265199 podStartE2EDuration="1m17.265199s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:52.165384324 +0000 UTC m=+98.761321975" watchObservedRunningTime="2025-10-07 00:12:52.265199 +0000 UTC m=+98.861136651" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.289737 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzwdf\" (UniqueName: \"kubernetes.io/projected/adc81096-c228-44a9-ad8d-befc0fd73127-kube-api-access-zzwdf\") pod \"certified-operators-k2bmb\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.342000 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.342381 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.842365943 +0000 UTC m=+99.438303594 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.416861 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.443694 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.444168 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:52.944145285 +0000 UTC m=+99.540082936 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.501173 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxfr6" podStartSLOduration=77.501150047 podStartE2EDuration="1m17.501150047s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:52.49951808 +0000 UTC m=+99.095455731" watchObservedRunningTime="2025-10-07 00:12:52.501150047 +0000 UTC m=+99.097087698" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.549806 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.550471 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.050442507 +0000 UTC m=+99.646380158 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.621638 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:52 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:52 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:52 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.621691 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.656374 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.656771 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.15675789 +0000 UTC m=+99.752695541 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.703975 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pmlfq"] Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.758813 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.759106 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.259092508 +0000 UTC m=+99.855030159 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.791987 4791 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-pvkht container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.22:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.792037 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" podUID="1aa3f88f-af84-47d7-84cd-0a195a373a57" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.22:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.837646 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmlfq" event={"ID":"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5","Type":"ContainerStarted","Data":"ebc801ffe4e401628adcc64444513c1a26feb712d50c0ed763fc33df9318cc09"} Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.860784 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.861143 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.361128388 +0000 UTC m=+99.957066039 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.862338 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t7lmb"] Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.871231 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nct9w" event={"ID":"f4083bd3-6146-4962-9aef-c7774a0c205a","Type":"ContainerStarted","Data":"558ddbc7ee149599546ccb7dadf3e0b26d95b88e00993387f36e04e4cb245c7d"} Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.897554 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-c8gwl" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.897825 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.963028 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:52 crc kubenswrapper[4791]: E1007 00:12:52.963347 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.463325752 +0000 UTC m=+100.059263403 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:52 crc kubenswrapper[4791]: I1007 00:12:52.964103 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.003305 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.503284513 +0000 UTC m=+100.099222164 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.067357 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.068032 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.568006488 +0000 UTC m=+100.163944139 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.170336 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.170677 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.670663595 +0000 UTC m=+100.266601246 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.211694 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mpd57"] Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.257848 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vxlpb"] Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.260124 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.260267 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.268238 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k2bmb"] Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.268537 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.277601 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.777570825 +0000 UTC m=+100.373508476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.277534 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.278107 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.278591 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-07 00:12:53.778582164 +0000 UTC m=+100.374519815 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.343104 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxlpb"] Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.373858 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-g26wx" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.379069 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.379472 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-catalog-content\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.379532 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-utilities\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.379599 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.879559333 +0000 UTC m=+100.475496984 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.379739 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8tdf\" (UniqueName: \"kubernetes.io/projected/aea8c166-6a54-4361-8dd8-49acde45cad2-kube-api-access-n8tdf\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.480715 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8tdf\" (UniqueName: \"kubernetes.io/projected/aea8c166-6a54-4361-8dd8-49acde45cad2-kube-api-access-n8tdf\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.480795 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.480862 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-catalog-content\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.480886 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-utilities\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.480933 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.483306 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:53.983281541 +0000 UTC m=+100.579219192 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.484104 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-catalog-content\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.484417 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-utilities\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.498433 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e16019f-8b86-49e5-a866-bb10c4c91e44-metrics-certs\") pod \"network-metrics-daemon-ppklr\" (UID: \"9e16019f-8b86-49e5-a866-bb10c4c91e44\") " pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.517741 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ppklr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.531821 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8tdf\" (UniqueName: \"kubernetes.io/projected/aea8c166-6a54-4361-8dd8-49acde45cad2-kube-api-access-n8tdf\") pod \"redhat-marketplace-vxlpb\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.581643 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.582139 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.082124499 +0000 UTC m=+100.678062150 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.622363 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:53 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:53 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:53 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.622441 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.654931 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tcgrr"] Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.656087 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.657771 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.683043 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-catalog-content\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.683087 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-utilities\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.683108 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b44vl\" (UniqueName: \"kubernetes.io/projected/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-kube-api-access-b44vl\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.683140 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:53 crc 
kubenswrapper[4791]: E1007 00:12:53.683437 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.183422807 +0000 UTC m=+100.779360458 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.686865 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcgrr"] Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.783895 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.784687 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-catalog-content\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.784724 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-utilities\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.784746 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b44vl\" (UniqueName: \"kubernetes.io/projected/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-kube-api-access-b44vl\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.785115 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.285100437 +0000 UTC m=+100.881038088 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.785513 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-catalog-content\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.785739 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-utilities\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.808811 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b44vl\" (UniqueName: \"kubernetes.io/projected/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-kube-api-access-b44vl\") pod \"redhat-marketplace-tcgrr\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.888345 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.888826 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.388811894 +0000 UTC m=+100.984749545 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.895935 4791 generic.go:334] "Generic (PLEG): container finished" podID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerID="689d39e269fe57ecbedd9380b2b04c09bf0dc911c1146573eb8f80c517d977d7" exitCode=0 Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.896050 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpd57" event={"ID":"da2f49ee-105d-43d7-82f3-4735f1693e90","Type":"ContainerDied","Data":"689d39e269fe57ecbedd9380b2b04c09bf0dc911c1146573eb8f80c517d977d7"} Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.896087 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpd57" event={"ID":"da2f49ee-105d-43d7-82f3-4735f1693e90","Type":"ContainerStarted","Data":"fd5ca0e31867a32323699588a4174a2075004f4ba6b9f154e3c1be0288d0cc2d"} Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.920833 4791 generic.go:334] "Generic (PLEG): container finished" podID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerID="abbfe5fedbe2d59ec89782a4c9674ce9ec3aa428ecfd904162742baf57367276" exitCode=0 Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.920975 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7lmb" event={"ID":"969bde5b-aa90-48e4-9352-76feaaabdd8e","Type":"ContainerDied","Data":"abbfe5fedbe2d59ec89782a4c9674ce9ec3aa428ecfd904162742baf57367276"} Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.921026 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7lmb" event={"ID":"969bde5b-aa90-48e4-9352-76feaaabdd8e","Type":"ContainerStarted","Data":"7e4d9697b9ac2b8faa7b877e10bdfc733477ba673f50e4807a8ef7347898279d"} Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.939178 4791 generic.go:334] "Generic (PLEG): container finished" podID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerID="ca6ffe3258922484a815b6e5c222be49113294dabc0b0f4bdb08212625f3aa88" exitCode=0 Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.939288 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmlfq" event={"ID":"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5","Type":"ContainerDied","Data":"ca6ffe3258922484a815b6e5c222be49113294dabc0b0f4bdb08212625f3aa88"} Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.945122 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.963697 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nct9w" event={"ID":"f4083bd3-6146-4962-9aef-c7774a0c205a","Type":"ContainerStarted","Data":"f19935f67842d29c1c394902b487b17d3c3e5b37740c132f694730feee19b292"} Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.964982 4791 generic.go:334] "Generic (PLEG): container finished" podID="adc81096-c228-44a9-ad8d-befc0fd73127" 
containerID="aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833" exitCode=0 Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.965151 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bmb" event={"ID":"adc81096-c228-44a9-ad8d-befc0fd73127","Type":"ContainerDied","Data":"aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833"} Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.965206 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bmb" event={"ID":"adc81096-c228-44a9-ad8d-befc0fd73127","Type":"ContainerStarted","Data":"9b38ad415729cc054a81fbff810803f12b205d7679e6eeb01b3d3e8a89020e0c"} Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.984071 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:12:53 crc kubenswrapper[4791]: I1007 00:12:53.989362 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:53 crc kubenswrapper[4791]: E1007 00:12:53.990601 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.490576656 +0000 UTC m=+101.086514357 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.090699 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.093516 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.593501741 +0000 UTC m=+101.189439392 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.194024 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.195589 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.695571092 +0000 UTC m=+101.291508743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.212941 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ppklr"] Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.234450 4791 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 07 00:12:54 crc kubenswrapper[4791]: W1007 00:12:54.283536 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e16019f_8b86_49e5_a866_bb10c4c91e44.slice/crio-f6ba705d5af962059728e62a1e122a038fbe14b0ac5ca97ff434668cdf4fdcfa WatchSource:0}: Error finding container f6ba705d5af962059728e62a1e122a038fbe14b0ac5ca97ff434668cdf4fdcfa: Status 404 returned error can't find the container with id f6ba705d5af962059728e62a1e122a038fbe14b0ac5ca97ff434668cdf4fdcfa Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.297100 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.297576 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.79754343 +0000 UTC m=+101.393481081 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.377872 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxlpb"] Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.401247 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.401685 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:54.901663309 +0000 UTC m=+101.497600960 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: W1007 00:12:54.414411 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaea8c166_6a54_4361_8dd8_49acde45cad2.slice/crio-aef6fd06f59c689d9707819e94d2ad0e477b63f992c24ddf0747dab16ffb79fe WatchSource:0}: Error finding container aef6fd06f59c689d9707819e94d2ad0e477b63f992c24ddf0747dab16ffb79fe: Status 404 returned error can't find the container with id aef6fd06f59c689d9707819e94d2ad0e477b63f992c24ddf0747dab16ffb79fe Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.454520 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l96f5"] Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.455699 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.459945 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.473778 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l96f5"] Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.505883 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.506074 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-catalog-content\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.506129 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmbs2\" (UniqueName: \"kubernetes.io/projected/b2c2e58d-292d-4116-ac44-c02b2f60a742-kube-api-access-wmbs2\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.506308 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-utilities\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.506884 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:55.00686674 +0000 UTC m=+101.602804391 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.607363 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.607659 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-utilities\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.607774 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-catalog-content\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.607806 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmbs2\" (UniqueName: \"kubernetes.io/projected/b2c2e58d-292d-4116-ac44-c02b2f60a742-kube-api-access-wmbs2\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.607926 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:55.10788998 +0000 UTC m=+101.703827631 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.608270 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-utilities\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.608318 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-catalog-content\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.624647 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:54 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:54 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:54 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.625036 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.644363 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmbs2\" (UniqueName: \"kubernetes.io/projected/b2c2e58d-292d-4116-ac44-c02b2f60a742-kube-api-access-wmbs2\") pod \"redhat-operators-l96f5\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.647263 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcgrr"] Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.709090 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.709678 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:55.209656392 +0000 UTC m=+101.805594113 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.788568 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.828848 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.829258 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:55.329234897 +0000 UTC m=+101.925172548 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.838080 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nbmhc"] Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.839354 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.860393 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbmhc"] Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.931134 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.931694 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gthjb\" (UniqueName: \"kubernetes.io/projected/ce732612-3842-4023-a2ff-fce88ab94972-kube-api-access-gthjb\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:54 crc kubenswrapper[4791]: E1007 00:12:54.931731 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-07 00:12:55.431711899 +0000 UTC m=+102.027649550 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.931759 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-utilities\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.931812 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-catalog-content\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.978279 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nct9w" event={"ID":"f4083bd3-6146-4962-9aef-c7774a0c205a","Type":"ContainerStarted","Data":"5b3f2a55aa7dee46054140bacb59fc443e2bcbe8bb8062694193659dab1dc442"} Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.980304 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcgrr" event={"ID":"5eddb7f9-673c-452e-9b7d-ac1462bb0c66","Type":"ContainerStarted","Data":"58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c"} Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.980352 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcgrr" event={"ID":"5eddb7f9-673c-452e-9b7d-ac1462bb0c66","Type":"ContainerStarted","Data":"501bb7b023e4b1da42c1fb5d4c1c93f64b4b43bdcebce20143f5afd603d364d5"} Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.994474 4791 generic.go:334] "Generic (PLEG): container finished" podID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerID="22b0e7d861535822ecfe72d317af47439e746559986f57123b1b6f5333d622b2" exitCode=0 Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.994588 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxlpb" event={"ID":"aea8c166-6a54-4361-8dd8-49acde45cad2","Type":"ContainerDied","Data":"22b0e7d861535822ecfe72d317af47439e746559986f57123b1b6f5333d622b2"} Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.994668 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxlpb" event={"ID":"aea8c166-6a54-4361-8dd8-49acde45cad2","Type":"ContainerStarted","Data":"aef6fd06f59c689d9707819e94d2ad0e477b63f992c24ddf0747dab16ffb79fe"} Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.998906 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ppklr" 
event={"ID":"9e16019f-8b86-49e5-a866-bb10c4c91e44","Type":"ContainerStarted","Data":"424bf0eeac1925749b3a511d1ccbc88422ffcd91e95b7b00fd0f19393d893ecd"} Oct 07 00:12:54 crc kubenswrapper[4791]: I1007 00:12:54.998937 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ppklr" event={"ID":"9e16019f-8b86-49e5-a866-bb10c4c91e44","Type":"ContainerStarted","Data":"f6ba705d5af962059728e62a1e122a038fbe14b0ac5ca97ff434668cdf4fdcfa"} Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.033085 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.033371 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gthjb\" (UniqueName: \"kubernetes.io/projected/ce732612-3842-4023-a2ff-fce88ab94972-kube-api-access-gthjb\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.033435 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-utilities\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.033464 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-catalog-content\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:55 crc kubenswrapper[4791]: E1007 00:12:55.033769 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 00:12:55.533737958 +0000 UTC m=+102.129675609 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.033876 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-catalog-content\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.034381 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-utilities\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.056643 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gthjb\" (UniqueName: \"kubernetes.io/projected/ce732612-3842-4023-a2ff-fce88ab94972-kube-api-access-gthjb\") pod \"redhat-operators-nbmhc\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.135066 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.135379 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l96f5"] Oct 07 00:12:55 crc kubenswrapper[4791]: E1007 00:12:55.135765 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 00:12:55.635749637 +0000 UTC m=+102.231687288 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpqb4" (UID: "05472df6-c385-4574-ba3c-844fe282b74b") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 00:12:55 crc kubenswrapper[4791]: W1007 00:12:55.150926 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2c2e58d_292d_4116_ac44_c02b2f60a742.slice/crio-b02f6801c7d5e97f51db5eb021f8605360a17172968a2ae7dd4fca29b0229f7d WatchSource:0}: Error finding container b02f6801c7d5e97f51db5eb021f8605360a17172968a2ae7dd4fca29b0229f7d: Status 404 returned error can't find the container with id b02f6801c7d5e97f51db5eb021f8605360a17172968a2ae7dd4fca29b0229f7d Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.185550 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.194440 4791 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-07T00:12:54.234487523Z","Handler":null,"Name":""} Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.199752 4791 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.199804 4791 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.237433 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.252032 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.339938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.354180 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.354232 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.407285 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpqb4\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.459101 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.460238 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.462263 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.462489 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.474214 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.543865 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d7e250b-7e86-4989-986a-3f01c8ea7144-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"3d7e250b-7e86-4989-986a-3f01c8ea7144\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.544583 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d7e250b-7e86-4989-986a-3f01c8ea7144-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"3d7e250b-7e86-4989-986a-3f01c8ea7144\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.547203 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbmhc"] Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.577994 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.624210 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:55 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:55 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:55 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.624284 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.646818 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d7e250b-7e86-4989-986a-3f01c8ea7144-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"3d7e250b-7e86-4989-986a-3f01c8ea7144\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.646937 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d7e250b-7e86-4989-986a-3f01c8ea7144-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"3d7e250b-7e86-4989-986a-3f01c8ea7144\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.647084 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d7e250b-7e86-4989-986a-3f01c8ea7144-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"3d7e250b-7e86-4989-986a-3f01c8ea7144\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.671471 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d7e250b-7e86-4989-986a-3f01c8ea7144-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"3d7e250b-7e86-4989-986a-3f01c8ea7144\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.792277 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.883799 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.883859 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:55 crc kubenswrapper[4791]: I1007 00:12:55.894608 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.025719 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nct9w" event={"ID":"f4083bd3-6146-4962-9aef-c7774a0c205a","Type":"ContainerStarted","Data":"3d2f27551d4e560270db27dc6b1e879afdfa0fe8b88c05c06530e662195aac45"} Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.030367 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmhc" event={"ID":"ce732612-3842-4023-a2ff-fce88ab94972","Type":"ContainerStarted","Data":"cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69"} Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.030416 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmhc" event={"ID":"ce732612-3842-4023-a2ff-fce88ab94972","Type":"ContainerStarted","Data":"2ad3ee79c2e7bc4633b9bc726c265eb35606e6bca92d834e42564f49d7189fdc"} Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.033200 4791 generic.go:334] "Generic (PLEG): container finished" podID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerID="58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c" exitCode=0 Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.033251 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcgrr" event={"ID":"5eddb7f9-673c-452e-9b7d-ac1462bb0c66","Type":"ContainerDied","Data":"58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c"} Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.035902 4791 generic.go:334] "Generic (PLEG): container finished" podID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerID="f2598c09fab0bbcd96884a4e71cb3d9b211fc247b410b7a0ae247c619fb1e721" exitCode=0 Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.035935 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l96f5" event={"ID":"b2c2e58d-292d-4116-ac44-c02b2f60a742","Type":"ContainerDied","Data":"f2598c09fab0bbcd96884a4e71cb3d9b211fc247b410b7a0ae247c619fb1e721"} Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.035950 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l96f5" event={"ID":"b2c2e58d-292d-4116-ac44-c02b2f60a742","Type":"ContainerStarted","Data":"b02f6801c7d5e97f51db5eb021f8605360a17172968a2ae7dd4fca29b0229f7d"} Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.042015 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ppklr" event={"ID":"9e16019f-8b86-49e5-a866-bb10c4c91e44","Type":"ContainerStarted","Data":"86ab9219bc2b09d91c92d49885ee15322bbf292c64ddd1472b5023d4d6d29f5b"} Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.051074 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="hostpath-provisioner/csi-hostpathplugin-nct9w" podStartSLOduration=13.051047525 podStartE2EDuration="13.051047525s" podCreationTimestamp="2025-10-07 00:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:56.046058052 +0000 UTC m=+102.641995703" watchObservedRunningTime="2025-10-07 00:12:56.051047525 +0000 UTC m=+102.646985176" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.058837 4791 generic.go:334] "Generic (PLEG): container finished" podID="20a4713c-cab0-4783-951b-1607d1d64c1d" containerID="418ea0752f565fba538fef2fe1725799c4d64d12e37b40a844dcce2e0b448c36" exitCode=0 Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.060390 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" event={"ID":"20a4713c-cab0-4783-951b-1607d1d64c1d","Type":"ContainerDied","Data":"418ea0752f565fba538fef2fe1725799c4d64d12e37b40a844dcce2e0b448c36"} Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.065511 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-zfd68" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.073356 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-4scm9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.073417 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4scm9" podUID="4cef9a77-b44f-41a4-87af-0e5230970af6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.073494 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-4scm9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.073548 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4scm9" podUID="4cef9a77-b44f-41a4-87af-0e5230970af6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.101934 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-ppklr" podStartSLOduration=82.10187502 podStartE2EDuration="1m22.10187502s" podCreationTimestamp="2025-10-07 00:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:56.099516962 +0000 UTC m=+102.695454613" watchObservedRunningTime="2025-10-07 00:12:56.10187502 +0000 UTC m=+102.697812671" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.105639 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.205935 4791 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpqb4"] Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.228587 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.228879 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.230157 4791 patch_prober.go:28] interesting pod/console-f9d7485db-slv5f container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.230205 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-slv5f" podUID="73643213-4cfb-4d70-b821-e78cc379de15" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Oct 07 00:12:56 crc kubenswrapper[4791]: W1007 00:12:56.287874 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05472df6_c385_4574_ba3c_844fe282b74b.slice/crio-d90aade3db653d03c82e0960164fe0b4429f9a0a6f10d24d34b8af27c3cd3670 WatchSource:0}: Error finding container d90aade3db653d03c82e0960164fe0b4429f9a0a6f10d24d34b8af27c3cd3670: Status 404 returned error can't find the container with id d90aade3db653d03c82e0960164fe0b4429f9a0a6f10d24d34b8af27c3cd3670 Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.415018 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 00:12:56 crc kubenswrapper[4791]: W1007 00:12:56.426058 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod3d7e250b_7e86_4989_986a_3f01c8ea7144.slice/crio-d730a5d25af9ae693c39f6c66e24043fa0a1c284a1a29c772af0bf873f883752 WatchSource:0}: Error finding container d730a5d25af9ae693c39f6c66e24043fa0a1c284a1a29c772af0bf873f883752: Status 404 returned error can't find the container with id d730a5d25af9ae693c39f6c66e24043fa0a1c284a1a29c772af0bf873f883752 Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.591318 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.616831 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.623447 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:56 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:56 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:56 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:56 crc kubenswrapper[4791]: I1007 00:12:56.623513 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" 
Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.110222 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" event={"ID":"05472df6-c385-4574-ba3c-844fe282b74b","Type":"ContainerStarted","Data":"1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46"} Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.110283 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" event={"ID":"05472df6-c385-4574-ba3c-844fe282b74b","Type":"ContainerStarted","Data":"d90aade3db653d03c82e0960164fe0b4429f9a0a6f10d24d34b8af27c3cd3670"} Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.111359 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.119946 4791 generic.go:334] "Generic (PLEG): container finished" podID="ce732612-3842-4023-a2ff-fce88ab94972" containerID="cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69" exitCode=0 Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.120076 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmhc" event={"ID":"ce732612-3842-4023-a2ff-fce88ab94972","Type":"ContainerDied","Data":"cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69"} Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.138109 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"3d7e250b-7e86-4989-986a-3f01c8ea7144","Type":"ContainerStarted","Data":"9c87c097863305779de9d55eeebf9edc7af062fda9235e4b34e4d0b96f0c93dd"} Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.138149 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"3d7e250b-7e86-4989-986a-3f01c8ea7144","Type":"ContainerStarted","Data":"d730a5d25af9ae693c39f6c66e24043fa0a1c284a1a29c772af0bf873f883752"} Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.155930 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" podStartSLOduration=82.155904305 podStartE2EDuration="1m22.155904305s" podCreationTimestamp="2025-10-07 00:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:57.143951441 +0000 UTC m=+103.739889092" watchObservedRunningTime="2025-10-07 00:12:57.155904305 +0000 UTC m=+103.751841956" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.161116 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.161104525 podStartE2EDuration="2.161104525s" podCreationTimestamp="2025-10-07 00:12:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:12:57.156916074 +0000 UTC m=+103.752853735" watchObservedRunningTime="2025-10-07 00:12:57.161104525 +0000 UTC m=+103.757042176" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.331809 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.332901 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.335904 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.341174 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.341306 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.388683 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b5276b6f-3881-45d2-92a0-1728a3231a09-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b5276b6f-3881-45d2-92a0-1728a3231a09\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.388880 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b5276b6f-3881-45d2-92a0-1728a3231a09-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b5276b6f-3881-45d2-92a0-1728a3231a09\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.496211 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b5276b6f-3881-45d2-92a0-1728a3231a09-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b5276b6f-3881-45d2-92a0-1728a3231a09\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.496278 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b5276b6f-3881-45d2-92a0-1728a3231a09-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b5276b6f-3881-45d2-92a0-1728a3231a09\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.496377 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b5276b6f-3881-45d2-92a0-1728a3231a09-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b5276b6f-3881-45d2-92a0-1728a3231a09\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.580990 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b5276b6f-3881-45d2-92a0-1728a3231a09-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b5276b6f-3881-45d2-92a0-1728a3231a09\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.596438 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.626576 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:57 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:57 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:57 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.626668 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.662531 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.700532 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5nnr\" (UniqueName: \"kubernetes.io/projected/20a4713c-cab0-4783-951b-1607d1d64c1d-kube-api-access-f5nnr\") pod \"20a4713c-cab0-4783-951b-1607d1d64c1d\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.703017 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/20a4713c-cab0-4783-951b-1607d1d64c1d-config-volume\") pod \"20a4713c-cab0-4783-951b-1607d1d64c1d\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.703205 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/20a4713c-cab0-4783-951b-1607d1d64c1d-secret-volume\") pod \"20a4713c-cab0-4783-951b-1607d1d64c1d\" (UID: \"20a4713c-cab0-4783-951b-1607d1d64c1d\") " Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.703834 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20a4713c-cab0-4783-951b-1607d1d64c1d-config-volume" (OuterVolumeSpecName: "config-volume") pod "20a4713c-cab0-4783-951b-1607d1d64c1d" (UID: "20a4713c-cab0-4783-951b-1607d1d64c1d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.709663 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/20a4713c-cab0-4783-951b-1607d1d64c1d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.727828 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20a4713c-cab0-4783-951b-1607d1d64c1d-kube-api-access-f5nnr" (OuterVolumeSpecName: "kube-api-access-f5nnr") pod "20a4713c-cab0-4783-951b-1607d1d64c1d" (UID: "20a4713c-cab0-4783-951b-1607d1d64c1d"). InnerVolumeSpecName "kube-api-access-f5nnr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.731059 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20a4713c-cab0-4783-951b-1607d1d64c1d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "20a4713c-cab0-4783-951b-1607d1d64c1d" (UID: "20a4713c-cab0-4783-951b-1607d1d64c1d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.814002 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/20a4713c-cab0-4783-951b-1607d1d64c1d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 00:12:57 crc kubenswrapper[4791]: I1007 00:12:57.815136 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5nnr\" (UniqueName: \"kubernetes.io/projected/20a4713c-cab0-4783-951b-1607d1d64c1d-kube-api-access-f5nnr\") on node \"crc\" DevicePath \"\"" Oct 07 00:12:58 crc kubenswrapper[4791]: I1007 00:12:58.168140 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" event={"ID":"20a4713c-cab0-4783-951b-1607d1d64c1d","Type":"ContainerDied","Data":"46f69b52d7f0e24585c4506acdd72738156d8c7af9050f78d76a2c882ff6ec41"} Oct 07 00:12:58 crc kubenswrapper[4791]: I1007 00:12:58.168752 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46f69b52d7f0e24585c4506acdd72738156d8c7af9050f78d76a2c882ff6ec41" Oct 07 00:12:58 crc kubenswrapper[4791]: I1007 00:12:58.168173 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329920-jzqmv" Oct 07 00:12:58 crc kubenswrapper[4791]: I1007 00:12:58.179293 4791 generic.go:334] "Generic (PLEG): container finished" podID="3d7e250b-7e86-4989-986a-3f01c8ea7144" containerID="9c87c097863305779de9d55eeebf9edc7af062fda9235e4b34e4d0b96f0c93dd" exitCode=0 Oct 07 00:12:58 crc kubenswrapper[4791]: I1007 00:12:58.179939 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"3d7e250b-7e86-4989-986a-3f01c8ea7144","Type":"ContainerDied","Data":"9c87c097863305779de9d55eeebf9edc7af062fda9235e4b34e4d0b96f0c93dd"} Oct 07 00:12:58 crc kubenswrapper[4791]: I1007 00:12:58.237420 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 00:12:58 crc kubenswrapper[4791]: I1007 00:12:58.624120 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:58 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:58 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:58 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:58 crc kubenswrapper[4791]: I1007 00:12:58.624181 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.205958 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b5276b6f-3881-45d2-92a0-1728a3231a09","Type":"ContainerStarted","Data":"f5e3107975aea2890247e67e218a878544c24b4b8397ba0cec4d2630966df621"} Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.621982 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:12:59 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:12:59 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:12:59 crc kubenswrapper[4791]: healthz check failed Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.622459 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.630969 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.752790 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d7e250b-7e86-4989-986a-3f01c8ea7144-kubelet-dir\") pod \"3d7e250b-7e86-4989-986a-3f01c8ea7144\" (UID: \"3d7e250b-7e86-4989-986a-3f01c8ea7144\") " Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.752871 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d7e250b-7e86-4989-986a-3f01c8ea7144-kube-api-access\") pod \"3d7e250b-7e86-4989-986a-3f01c8ea7144\" (UID: \"3d7e250b-7e86-4989-986a-3f01c8ea7144\") " Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.752961 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d7e250b-7e86-4989-986a-3f01c8ea7144-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "3d7e250b-7e86-4989-986a-3f01c8ea7144" (UID: "3d7e250b-7e86-4989-986a-3f01c8ea7144"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.753197 4791 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d7e250b-7e86-4989-986a-3f01c8ea7144-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.759943 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d7e250b-7e86-4989-986a-3f01c8ea7144-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "3d7e250b-7e86-4989-986a-3f01c8ea7144" (UID: "3d7e250b-7e86-4989-986a-3f01c8ea7144"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:12:59 crc kubenswrapper[4791]: I1007 00:12:59.854163 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d7e250b-7e86-4989-986a-3f01c8ea7144-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:00 crc kubenswrapper[4791]: I1007 00:13:00.221246 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"3d7e250b-7e86-4989-986a-3f01c8ea7144","Type":"ContainerDied","Data":"d730a5d25af9ae693c39f6c66e24043fa0a1c284a1a29c772af0bf873f883752"} Oct 07 00:13:00 crc kubenswrapper[4791]: I1007 00:13:00.221335 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d730a5d25af9ae693c39f6c66e24043fa0a1c284a1a29c772af0bf873f883752" Oct 07 00:13:00 crc kubenswrapper[4791]: I1007 00:13:00.221506 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 00:13:00 crc kubenswrapper[4791]: I1007 00:13:00.232301 4791 generic.go:334] "Generic (PLEG): container finished" podID="b5276b6f-3881-45d2-92a0-1728a3231a09" containerID="98da9c1c9a4857cbd6f7d87a49ae17d8bbcc97bdbb76cec8714f8645a7626093" exitCode=0 Oct 07 00:13:00 crc kubenswrapper[4791]: I1007 00:13:00.232355 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b5276b6f-3881-45d2-92a0-1728a3231a09","Type":"ContainerDied","Data":"98da9c1c9a4857cbd6f7d87a49ae17d8bbcc97bdbb76cec8714f8645a7626093"} Oct 07 00:13:00 crc kubenswrapper[4791]: I1007 00:13:00.618662 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:13:00 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:13:00 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:13:00 crc kubenswrapper[4791]: healthz check failed Oct 07 00:13:00 crc kubenswrapper[4791]: I1007 00:13:00.618729 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:13:01 crc kubenswrapper[4791]: I1007 00:13:01.525086 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-667xs" Oct 07 00:13:01 crc kubenswrapper[4791]: I1007 00:13:01.619893 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:13:01 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:13:01 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:13:01 crc kubenswrapper[4791]: healthz check failed Oct 07 00:13:01 crc kubenswrapper[4791]: I1007 00:13:01.619967 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:13:02 crc kubenswrapper[4791]: I1007 00:13:02.618636 4791 
patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:13:02 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:13:02 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:13:02 crc kubenswrapper[4791]: healthz check failed Oct 07 00:13:02 crc kubenswrapper[4791]: I1007 00:13:02.618723 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:13:03 crc kubenswrapper[4791]: I1007 00:13:03.618860 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:13:03 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:13:03 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:13:03 crc kubenswrapper[4791]: healthz check failed Oct 07 00:13:03 crc kubenswrapper[4791]: I1007 00:13:03.618932 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:13:04 crc kubenswrapper[4791]: I1007 00:13:04.621717 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:13:04 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:13:04 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:13:04 crc kubenswrapper[4791]: healthz check failed Oct 07 00:13:04 crc kubenswrapper[4791]: I1007 00:13:04.622210 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:13:05 crc kubenswrapper[4791]: I1007 00:13:05.620779 4791 patch_prober.go:28] interesting pod/router-default-5444994796-qrrrl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 00:13:05 crc kubenswrapper[4791]: [-]has-synced failed: reason withheld Oct 07 00:13:05 crc kubenswrapper[4791]: [+]process-running ok Oct 07 00:13:05 crc kubenswrapper[4791]: healthz check failed Oct 07 00:13:05 crc kubenswrapper[4791]: I1007 00:13:05.620871 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrrrl" podUID="6e45939a-f804-4553-bced-da13026cdc92" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 00:13:06 crc kubenswrapper[4791]: I1007 00:13:06.070220 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-4scm9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: 
connection refused" start-of-body= Oct 07 00:13:06 crc kubenswrapper[4791]: I1007 00:13:06.070580 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4scm9" podUID="4cef9a77-b44f-41a4-87af-0e5230970af6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 07 00:13:06 crc kubenswrapper[4791]: I1007 00:13:06.070821 4791 patch_prober.go:28] interesting pod/downloads-7954f5f757-4scm9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 07 00:13:06 crc kubenswrapper[4791]: I1007 00:13:06.070898 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4scm9" podUID="4cef9a77-b44f-41a4-87af-0e5230970af6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 07 00:13:06 crc kubenswrapper[4791]: I1007 00:13:06.227798 4791 patch_prober.go:28] interesting pod/console-f9d7485db-slv5f container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Oct 07 00:13:06 crc kubenswrapper[4791]: I1007 00:13:06.227857 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-slv5f" podUID="73643213-4cfb-4d70-b821-e78cc379de15" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Oct 07 00:13:06 crc kubenswrapper[4791]: I1007 00:13:06.620351 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:13:06 crc kubenswrapper[4791]: I1007 00:13:06.625594 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-qrrrl" Oct 07 00:13:15 crc kubenswrapper[4791]: I1007 00:13:15.583091 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:13:16 crc kubenswrapper[4791]: I1007 00:13:16.097838 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-4scm9" Oct 07 00:13:16 crc kubenswrapper[4791]: I1007 00:13:16.232007 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:13:16 crc kubenswrapper[4791]: I1007 00:13:16.235632 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-slv5f" Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.290717 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.395102 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b5276b6f-3881-45d2-92a0-1728a3231a09","Type":"ContainerDied","Data":"f5e3107975aea2890247e67e218a878544c24b4b8397ba0cec4d2630966df621"} Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.395159 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5e3107975aea2890247e67e218a878544c24b4b8397ba0cec4d2630966df621" Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.395118 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.399527 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b5276b6f-3881-45d2-92a0-1728a3231a09-kube-api-access\") pod \"b5276b6f-3881-45d2-92a0-1728a3231a09\" (UID: \"b5276b6f-3881-45d2-92a0-1728a3231a09\") " Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.399879 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b5276b6f-3881-45d2-92a0-1728a3231a09-kubelet-dir\") pod \"b5276b6f-3881-45d2-92a0-1728a3231a09\" (UID: \"b5276b6f-3881-45d2-92a0-1728a3231a09\") " Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.400066 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b5276b6f-3881-45d2-92a0-1728a3231a09-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b5276b6f-3881-45d2-92a0-1728a3231a09" (UID: "b5276b6f-3881-45d2-92a0-1728a3231a09"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.410897 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5276b6f-3881-45d2-92a0-1728a3231a09-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b5276b6f-3881-45d2-92a0-1728a3231a09" (UID: "b5276b6f-3881-45d2-92a0-1728a3231a09"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.501179 4791 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b5276b6f-3881-45d2-92a0-1728a3231a09-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:17 crc kubenswrapper[4791]: I1007 00:13:17.501222 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b5276b6f-3881-45d2-92a0-1728a3231a09-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:20 crc kubenswrapper[4791]: I1007 00:13:20.414558 4791 generic.go:334] "Generic (PLEG): container finished" podID="9a1fa4d9-9173-47fd-bc14-68317d5adfa4" containerID="917c4f304e0849ec653d3dd1b5204c47e02863093db4b8bd70c0d0bbe00a5864" exitCode=0 Oct 07 00:13:20 crc kubenswrapper[4791]: I1007 00:13:20.414640 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29329920-xf4vz" event={"ID":"9a1fa4d9-9173-47fd-bc14-68317d5adfa4","Type":"ContainerDied","Data":"917c4f304e0849ec653d3dd1b5204c47e02863093db4b8bd70c0d0bbe00a5864"} Oct 07 00:13:22 crc kubenswrapper[4791]: E1007 00:13:22.103884 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 07 00:13:22 crc kubenswrapper[4791]: E1007 00:13:22.104538 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kpd86,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-t7lmb_openshift-marketplace(969bde5b-aa90-48e4-9352-76feaaabdd8e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 00:13:22 crc kubenswrapper[4791]: E1007 00:13:22.106135 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-t7lmb" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.441091 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.444258 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29329920-xf4vz" event={"ID":"9a1fa4d9-9173-47fd-bc14-68317d5adfa4","Type":"ContainerDied","Data":"4afdea2907d4158988ecbb6bfc28a8279fd8409e7bfc688471beb6a595c5952c"} Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.444300 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4afdea2907d4158988ecbb6bfc28a8279fd8409e7bfc688471beb6a595c5952c" Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.483903 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-serviceca\") pod \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\" (UID: \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\") " Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.484047 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp66q\" (UniqueName: \"kubernetes.io/projected/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-kube-api-access-vp66q\") pod \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\" (UID: \"9a1fa4d9-9173-47fd-bc14-68317d5adfa4\") " Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.484981 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-serviceca" (OuterVolumeSpecName: "serviceca") pod "9a1fa4d9-9173-47fd-bc14-68317d5adfa4" (UID: "9a1fa4d9-9173-47fd-bc14-68317d5adfa4"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.495317 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-kube-api-access-vp66q" (OuterVolumeSpecName: "kube-api-access-vp66q") pod "9a1fa4d9-9173-47fd-bc14-68317d5adfa4" (UID: "9a1fa4d9-9173-47fd-bc14-68317d5adfa4"). InnerVolumeSpecName "kube-api-access-vp66q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:13:22 crc kubenswrapper[4791]: E1007 00:13:22.495659 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 07 00:13:22 crc kubenswrapper[4791]: E1007 00:13:22.495848 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zzwdf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-k2bmb_openshift-marketplace(adc81096-c228-44a9-ad8d-befc0fd73127): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 00:13:22 crc kubenswrapper[4791]: E1007 00:13:22.497798 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-k2bmb" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.586291 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp66q\" (UniqueName: \"kubernetes.io/projected/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-kube-api-access-vp66q\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:22 crc kubenswrapper[4791]: I1007 00:13:22.586325 4791 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9a1fa4d9-9173-47fd-bc14-68317d5adfa4-serviceca\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:23 crc kubenswrapper[4791]: I1007 00:13:23.448691 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29329920-xf4vz" Oct 07 00:13:25 crc kubenswrapper[4791]: E1007 00:13:25.556704 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-t7lmb" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" Oct 07 00:13:25 crc kubenswrapper[4791]: E1007 00:13:25.559041 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k2bmb" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" Oct 07 00:13:26 crc kubenswrapper[4791]: E1007 00:13:26.404147 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 07 00:13:26 crc kubenswrapper[4791]: E1007 00:13:26.404434 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b44vl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tcgrr_openshift-marketplace(5eddb7f9-673c-452e-9b7d-ac1462bb0c66): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 00:13:26 crc kubenswrapper[4791]: E1007 00:13:26.406587 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 07 00:13:26 crc kubenswrapper[4791]: E1007 00:13:26.406767 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n8tdf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-vxlpb_openshift-marketplace(aea8c166-6a54-4361-8dd8-49acde45cad2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 00:13:26 crc kubenswrapper[4791]: E1007 00:13:26.407701 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tcgrr" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" Oct 07 00:13:26 crc kubenswrapper[4791]: E1007 00:13:26.408169 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-vxlpb" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" Oct 07 00:13:26 crc kubenswrapper[4791]: I1007 00:13:26.451820 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fs5qp" Oct 07 00:13:26 crc kubenswrapper[4791]: E1007 00:13:26.464468 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-vxlpb" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" Oct 07 00:13:26 crc kubenswrapper[4791]: E1007 00:13:26.467243 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" 
pod="openshift-marketplace/redhat-marketplace-tcgrr" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" Oct 07 00:13:27 crc kubenswrapper[4791]: I1007 00:13:27.472977 4791 generic.go:334] "Generic (PLEG): container finished" podID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerID="749c24599ed39a6ceb291bba9586e52654f183492d2f5ba0dd507c822e19ce8a" exitCode=0 Oct 07 00:13:27 crc kubenswrapper[4791]: I1007 00:13:27.473136 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l96f5" event={"ID":"b2c2e58d-292d-4116-ac44-c02b2f60a742","Type":"ContainerDied","Data":"749c24599ed39a6ceb291bba9586e52654f183492d2f5ba0dd507c822e19ce8a"} Oct 07 00:13:27 crc kubenswrapper[4791]: I1007 00:13:27.478569 4791 generic.go:334] "Generic (PLEG): container finished" podID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerID="f02e0c4dc32232b1a8addaf1547c12b5ac2301105ca7c8b80892c0fdb7b74b6d" exitCode=0 Oct 07 00:13:27 crc kubenswrapper[4791]: I1007 00:13:27.478633 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmlfq" event={"ID":"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5","Type":"ContainerDied","Data":"f02e0c4dc32232b1a8addaf1547c12b5ac2301105ca7c8b80892c0fdb7b74b6d"} Oct 07 00:13:27 crc kubenswrapper[4791]: I1007 00:13:27.480759 4791 generic.go:334] "Generic (PLEG): container finished" podID="ce732612-3842-4023-a2ff-fce88ab94972" containerID="67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf" exitCode=0 Oct 07 00:13:27 crc kubenswrapper[4791]: I1007 00:13:27.480802 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmhc" event={"ID":"ce732612-3842-4023-a2ff-fce88ab94972","Type":"ContainerDied","Data":"67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf"} Oct 07 00:13:27 crc kubenswrapper[4791]: I1007 00:13:27.505640 4791 generic.go:334] "Generic (PLEG): container finished" podID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerID="9b958fc5fec4a28cea2713d565d593ebab0aed98d315d63ddbd8986afa6767ca" exitCode=0 Oct 07 00:13:27 crc kubenswrapper[4791]: I1007 00:13:27.505772 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpd57" event={"ID":"da2f49ee-105d-43d7-82f3-4735f1693e90","Type":"ContainerDied","Data":"9b958fc5fec4a28cea2713d565d593ebab0aed98d315d63ddbd8986afa6767ca"} Oct 07 00:13:29 crc kubenswrapper[4791]: I1007 00:13:29.519422 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmhc" event={"ID":"ce732612-3842-4023-a2ff-fce88ab94972","Type":"ContainerStarted","Data":"367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29"} Oct 07 00:13:29 crc kubenswrapper[4791]: I1007 00:13:29.523039 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpd57" event={"ID":"da2f49ee-105d-43d7-82f3-4735f1693e90","Type":"ContainerStarted","Data":"791b6f56bda2c2fc463dd0131784e0ab59c2e4e5a620c61de6f5f31a20333987"} Oct 07 00:13:29 crc kubenswrapper[4791]: I1007 00:13:29.525753 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l96f5" event={"ID":"b2c2e58d-292d-4116-ac44-c02b2f60a742","Type":"ContainerStarted","Data":"d1426db41038678bacb452f8c9828aac6a729090c39af5eee5285b750f78112d"} Oct 07 00:13:29 crc kubenswrapper[4791]: I1007 00:13:29.527667 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmlfq" 
event={"ID":"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5","Type":"ContainerStarted","Data":"b9eb3d915919d1983661fbd87a80781332a0779a3ef98b062ae3f5e73e9a955d"} Oct 07 00:13:29 crc kubenswrapper[4791]: I1007 00:13:29.538728 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nbmhc" podStartSLOduration=2.74336141 podStartE2EDuration="35.538706773s" podCreationTimestamp="2025-10-07 00:12:54 +0000 UTC" firstStartedPulling="2025-10-07 00:12:56.032705967 +0000 UTC m=+102.628643618" lastFinishedPulling="2025-10-07 00:13:28.82805133 +0000 UTC m=+135.423988981" observedRunningTime="2025-10-07 00:13:29.537214156 +0000 UTC m=+136.133151817" watchObservedRunningTime="2025-10-07 00:13:29.538706773 +0000 UTC m=+136.134644424" Oct 07 00:13:29 crc kubenswrapper[4791]: I1007 00:13:29.559996 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mpd57" podStartSLOduration=3.6621488859999998 podStartE2EDuration="38.559976079s" podCreationTimestamp="2025-10-07 00:12:51 +0000 UTC" firstStartedPulling="2025-10-07 00:12:53.944728065 +0000 UTC m=+100.540665716" lastFinishedPulling="2025-10-07 00:13:28.842555258 +0000 UTC m=+135.438492909" observedRunningTime="2025-10-07 00:13:29.55536947 +0000 UTC m=+136.151307121" watchObservedRunningTime="2025-10-07 00:13:29.559976079 +0000 UTC m=+136.155913730" Oct 07 00:13:29 crc kubenswrapper[4791]: I1007 00:13:29.570077 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l96f5" podStartSLOduration=2.7313194750000003 podStartE2EDuration="35.570036125s" podCreationTimestamp="2025-10-07 00:12:54 +0000 UTC" firstStartedPulling="2025-10-07 00:12:56.037463604 +0000 UTC m=+102.633401245" lastFinishedPulling="2025-10-07 00:13:28.876180244 +0000 UTC m=+135.472117895" observedRunningTime="2025-10-07 00:13:29.569790024 +0000 UTC m=+136.165727675" watchObservedRunningTime="2025-10-07 00:13:29.570036125 +0000 UTC m=+136.165973786" Oct 07 00:13:29 crc kubenswrapper[4791]: I1007 00:13:29.589112 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pmlfq" podStartSLOduration=3.5800762170000002 podStartE2EDuration="38.58909187s" podCreationTimestamp="2025-10-07 00:12:51 +0000 UTC" firstStartedPulling="2025-10-07 00:12:53.945553809 +0000 UTC m=+100.541491460" lastFinishedPulling="2025-10-07 00:13:28.954569462 +0000 UTC m=+135.550507113" observedRunningTime="2025-10-07 00:13:29.588737564 +0000 UTC m=+136.184675235" watchObservedRunningTime="2025-10-07 00:13:29.58909187 +0000 UTC m=+136.185029521" Oct 07 00:13:31 crc kubenswrapper[4791]: I1007 00:13:31.622980 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:13:31 crc kubenswrapper[4791]: I1007 00:13:31.623343 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:13:31 crc kubenswrapper[4791]: I1007 00:13:31.757923 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:13:32 crc kubenswrapper[4791]: I1007 00:13:32.034966 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:13:32 crc kubenswrapper[4791]: I1007 00:13:32.035029 4791 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:13:32 crc kubenswrapper[4791]: I1007 00:13:32.085730 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:13:34 crc kubenswrapper[4791]: I1007 00:13:34.788797 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:13:34 crc kubenswrapper[4791]: I1007 00:13:34.789371 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:13:34 crc kubenswrapper[4791]: I1007 00:13:34.860573 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:13:35 crc kubenswrapper[4791]: I1007 00:13:35.185757 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:13:35 crc kubenswrapper[4791]: I1007 00:13:35.185836 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:13:35 crc kubenswrapper[4791]: I1007 00:13:35.240987 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:13:35 crc kubenswrapper[4791]: I1007 00:13:35.628814 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:13:35 crc kubenswrapper[4791]: I1007 00:13:35.651276 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:13:36 crc kubenswrapper[4791]: I1007 00:13:36.843748 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbmhc"] Oct 07 00:13:37 crc kubenswrapper[4791]: I1007 00:13:37.601652 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nbmhc" podUID="ce732612-3842-4023-a2ff-fce88ab94972" containerName="registry-server" containerID="cri-o://367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29" gracePeriod=2 Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.494637 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.530321 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gthjb\" (UniqueName: \"kubernetes.io/projected/ce732612-3842-4023-a2ff-fce88ab94972-kube-api-access-gthjb\") pod \"ce732612-3842-4023-a2ff-fce88ab94972\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.530494 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-catalog-content\") pod \"ce732612-3842-4023-a2ff-fce88ab94972\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.530568 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-utilities\") pod \"ce732612-3842-4023-a2ff-fce88ab94972\" (UID: \"ce732612-3842-4023-a2ff-fce88ab94972\") " Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.531975 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-utilities" (OuterVolumeSpecName: "utilities") pod "ce732612-3842-4023-a2ff-fce88ab94972" (UID: "ce732612-3842-4023-a2ff-fce88ab94972"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.538876 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce732612-3842-4023-a2ff-fce88ab94972-kube-api-access-gthjb" (OuterVolumeSpecName: "kube-api-access-gthjb") pod "ce732612-3842-4023-a2ff-fce88ab94972" (UID: "ce732612-3842-4023-a2ff-fce88ab94972"). InnerVolumeSpecName "kube-api-access-gthjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.610787 4791 generic.go:334] "Generic (PLEG): container finished" podID="ce732612-3842-4023-a2ff-fce88ab94972" containerID="367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29" exitCode=0 Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.610845 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmhc" event={"ID":"ce732612-3842-4023-a2ff-fce88ab94972","Type":"ContainerDied","Data":"367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29"} Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.610879 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbmhc" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.610890 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmhc" event={"ID":"ce732612-3842-4023-a2ff-fce88ab94972","Type":"ContainerDied","Data":"2ad3ee79c2e7bc4633b9bc726c265eb35606e6bca92d834e42564f49d7189fdc"} Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.610914 4791 scope.go:117] "RemoveContainer" containerID="367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.627842 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce732612-3842-4023-a2ff-fce88ab94972" (UID: "ce732612-3842-4023-a2ff-fce88ab94972"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.666802 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gthjb\" (UniqueName: \"kubernetes.io/projected/ce732612-3842-4023-a2ff-fce88ab94972-kube-api-access-gthjb\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.666891 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.666906 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce732612-3842-4023-a2ff-fce88ab94972-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.679745 4791 scope.go:117] "RemoveContainer" containerID="67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.699086 4791 scope.go:117] "RemoveContainer" containerID="cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.714162 4791 scope.go:117] "RemoveContainer" containerID="367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29" Oct 07 00:13:38 crc kubenswrapper[4791]: E1007 00:13:38.714761 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29\": container with ID starting with 367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29 not found: ID does not exist" containerID="367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.714815 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29"} err="failed to get container status \"367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29\": rpc error: code = NotFound desc = could not find container \"367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29\": container with ID starting with 367f1c1caa2fb604b1b7ab748e5198b377a373eedd87c9437dfa56408d05ab29 not found: ID does not exist" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.714880 4791 scope.go:117] "RemoveContainer" 
containerID="67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf" Oct 07 00:13:38 crc kubenswrapper[4791]: E1007 00:13:38.715182 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf\": container with ID starting with 67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf not found: ID does not exist" containerID="67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.715205 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf"} err="failed to get container status \"67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf\": rpc error: code = NotFound desc = could not find container \"67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf\": container with ID starting with 67f6fd02f43c77bec4280fe7be8be8e4fae36378c3f154ceb4e3340670f907bf not found: ID does not exist" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.715221 4791 scope.go:117] "RemoveContainer" containerID="cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69" Oct 07 00:13:38 crc kubenswrapper[4791]: E1007 00:13:38.715491 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69\": container with ID starting with cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69 not found: ID does not exist" containerID="cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.715512 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69"} err="failed to get container status \"cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69\": rpc error: code = NotFound desc = could not find container \"cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69\": container with ID starting with cf2e3a888e8f80e5ef654b7f41ddcd7fd0c032f1250e5dd0715365b25c0f6d69 not found: ID does not exist" Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.945654 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbmhc"] Oct 07 00:13:38 crc kubenswrapper[4791]: I1007 00:13:38.948169 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nbmhc"] Oct 07 00:13:39 crc kubenswrapper[4791]: I1007 00:13:39.628477 4791 generic.go:334] "Generic (PLEG): container finished" podID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerID="226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f" exitCode=0 Oct 07 00:13:39 crc kubenswrapper[4791]: I1007 00:13:39.628540 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcgrr" event={"ID":"5eddb7f9-673c-452e-9b7d-ac1462bb0c66","Type":"ContainerDied","Data":"226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f"} Oct 07 00:13:40 crc kubenswrapper[4791]: I1007 00:13:40.087497 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce732612-3842-4023-a2ff-fce88ab94972" path="/var/lib/kubelet/pods/ce732612-3842-4023-a2ff-fce88ab94972/volumes" Oct 07 
00:13:40 crc kubenswrapper[4791]: I1007 00:13:40.637508 4791 generic.go:334] "Generic (PLEG): container finished" podID="adc81096-c228-44a9-ad8d-befc0fd73127" containerID="cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9" exitCode=0 Oct 07 00:13:40 crc kubenswrapper[4791]: I1007 00:13:40.637614 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bmb" event={"ID":"adc81096-c228-44a9-ad8d-befc0fd73127","Type":"ContainerDied","Data":"cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9"} Oct 07 00:13:40 crc kubenswrapper[4791]: I1007 00:13:40.642463 4791 generic.go:334] "Generic (PLEG): container finished" podID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerID="68882cd24ef0f4109264884180de873aed4336a06c1a6a16d81a0457b129e246" exitCode=0 Oct 07 00:13:40 crc kubenswrapper[4791]: I1007 00:13:40.642514 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7lmb" event={"ID":"969bde5b-aa90-48e4-9352-76feaaabdd8e","Type":"ContainerDied","Data":"68882cd24ef0f4109264884180de873aed4336a06c1a6a16d81a0457b129e246"} Oct 07 00:13:40 crc kubenswrapper[4791]: I1007 00:13:40.645358 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcgrr" event={"ID":"5eddb7f9-673c-452e-9b7d-ac1462bb0c66","Type":"ContainerStarted","Data":"ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017"} Oct 07 00:13:40 crc kubenswrapper[4791]: I1007 00:13:40.694828 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tcgrr" podStartSLOduration=2.6785796509999997 podStartE2EDuration="47.694809213s" podCreationTimestamp="2025-10-07 00:12:53 +0000 UTC" firstStartedPulling="2025-10-07 00:12:54.991660875 +0000 UTC m=+101.587598526" lastFinishedPulling="2025-10-07 00:13:40.007890437 +0000 UTC m=+146.603828088" observedRunningTime="2025-10-07 00:13:40.693532165 +0000 UTC m=+147.289469826" watchObservedRunningTime="2025-10-07 00:13:40.694809213 +0000 UTC m=+147.290746864" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.004275 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.007059 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.021154 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.105903 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: 
\"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.106139 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.106184 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.108568 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.108684 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.117491 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.118194 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.133148 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.133330 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.226733 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.259207 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.265683 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:13:41 crc kubenswrapper[4791]: W1007 00:13:41.477965 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-bc5e86391021948bd9c571fc8842ea532c67cec64f0cf677c0668959e0b8e855 WatchSource:0}: Error finding container bc5e86391021948bd9c571fc8842ea532c67cec64f0cf677c0668959e0b8e855: Status 404 returned error can't find the container with id bc5e86391021948bd9c571fc8842ea532c67cec64f0cf677c0668959e0b8e855 Oct 07 00:13:41 crc kubenswrapper[4791]: W1007 00:13:41.549275 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-a8ea897dbf67c504f0897efd75dfbc68c890ff0d9ef86a157d9c0ca9996b208c WatchSource:0}: Error finding container a8ea897dbf67c504f0897efd75dfbc68c890ff0d9ef86a157d9c0ca9996b208c: Status 404 returned error can't find the container with id a8ea897dbf67c504f0897efd75dfbc68c890ff0d9ef86a157d9c0ca9996b208c Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.603789 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.603856 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:13:41 crc kubenswrapper[4791]: W1007 00:13:41.611555 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-245f13b701fe79d7566553d0ab6385c2832d15b66115696e771409330020b78b WatchSource:0}: Error finding container 245f13b701fe79d7566553d0ab6385c2832d15b66115696e771409330020b78b: Status 404 returned error can't find the container with id 245f13b701fe79d7566553d0ab6385c2832d15b66115696e771409330020b78b Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.663265 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"245f13b701fe79d7566553d0ab6385c2832d15b66115696e771409330020b78b"} Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.667054 4791 generic.go:334] "Generic (PLEG): container finished" podID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerID="7f773ec7e206cb8232d8d1c23c612be7e14f47c245dd4fc77588a848ec5a7bb4" exitCode=0 Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.667136 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxlpb" event={"ID":"aea8c166-6a54-4361-8dd8-49acde45cad2","Type":"ContainerDied","Data":"7f773ec7e206cb8232d8d1c23c612be7e14f47c245dd4fc77588a848ec5a7bb4"} Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.670700 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a8ea897dbf67c504f0897efd75dfbc68c890ff0d9ef86a157d9c0ca9996b208c"} Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.675104 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bmb" event={"ID":"adc81096-c228-44a9-ad8d-befc0fd73127","Type":"ContainerStarted","Data":"7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa"} Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.678833 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.684439 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7lmb" event={"ID":"969bde5b-aa90-48e4-9352-76feaaabdd8e","Type":"ContainerStarted","Data":"647931cbfed2325b0274373a146567bbc12e4a0e9a3a0458657695355effd5f0"} Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.687121 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"bc5e86391021948bd9c571fc8842ea532c67cec64f0cf677c0668959e0b8e855"} Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.717584 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k2bmb" podStartSLOduration=3.37510334 podStartE2EDuration="50.71756388s" podCreationTimestamp="2025-10-07 00:12:51 +0000 UTC" firstStartedPulling="2025-10-07 00:12:53.983170713 +0000 UTC m=+100.579108364" lastFinishedPulling="2025-10-07 00:13:41.325631253 +0000 UTC m=+147.921568904" observedRunningTime="2025-10-07 00:13:41.715527398 +0000 UTC m=+148.311465049" watchObservedRunningTime="2025-10-07 00:13:41.71756388 +0000 UTC m=+148.313501531" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.739565 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t7lmb" podStartSLOduration=3.617247928 podStartE2EDuration="50.739526697s" podCreationTimestamp="2025-10-07 00:12:51 +0000 UTC" firstStartedPulling="2025-10-07 00:12:53.947042772 +0000 UTC m=+100.542980423" lastFinishedPulling="2025-10-07 00:13:41.069321541 +0000 UTC m=+147.665259192" observedRunningTime="2025-10-07 00:13:41.736819714 +0000 UTC m=+148.332757365" watchObservedRunningTime="2025-10-07 00:13:41.739526697 +0000 UTC m=+148.335476139" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.794360 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.794428 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:13:41 crc kubenswrapper[4791]: I1007 00:13:41.903220 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pvkht"] Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.109620 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.418872 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 
00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.418953 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.695905 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f05e47ad8a3565d89488145b16ddd82fb81a0f2a73038c01ae7ad6a58bdffd1d"} Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.697854 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4b08cba88e549de03ba4c437ba4975f925cd71b5eb6fd7688cf9c7a5576ab356"} Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.698120 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.700605 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxlpb" event={"ID":"aea8c166-6a54-4361-8dd8-49acde45cad2","Type":"ContainerStarted","Data":"371969781e05f6a41750d7a44ba01e25c7b374bcc9c57b0232007c992af5d575"} Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.703628 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b9dfd8a45e7e02c684b3a5827384fb67928dc807ec45b150e5efbe9872c57cc1"} Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.760150 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vxlpb" podStartSLOduration=2.5637233459999997 podStartE2EDuration="49.760127357s" podCreationTimestamp="2025-10-07 00:12:53 +0000 UTC" firstStartedPulling="2025-10-07 00:12:54.996619108 +0000 UTC m=+101.592556759" lastFinishedPulling="2025-10-07 00:13:42.193023129 +0000 UTC m=+148.788960770" observedRunningTime="2025-10-07 00:13:42.738214083 +0000 UTC m=+149.334151734" watchObservedRunningTime="2025-10-07 00:13:42.760127357 +0000 UTC m=+149.356065008" Oct 07 00:13:42 crc kubenswrapper[4791]: I1007 00:13:42.877126 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-t7lmb" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="registry-server" probeResult="failure" output=< Oct 07 00:13:42 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Oct 07 00:13:42 crc kubenswrapper[4791]: > Oct 07 00:13:43 crc kubenswrapper[4791]: I1007 00:13:43.471304 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-k2bmb" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="registry-server" probeResult="failure" output=< Oct 07 00:13:43 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Oct 07 00:13:43 crc kubenswrapper[4791]: > Oct 07 00:13:43 crc kubenswrapper[4791]: I1007 00:13:43.659153 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:13:43 crc kubenswrapper[4791]: I1007 00:13:43.659559 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:13:43 crc kubenswrapper[4791]: I1007 00:13:43.705417 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:13:43 crc kubenswrapper[4791]: I1007 00:13:43.985278 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:13:43 crc kubenswrapper[4791]: I1007 00:13:43.985383 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.035287 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.444948 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mpd57"] Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.445266 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mpd57" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerName="registry-server" containerID="cri-o://791b6f56bda2c2fc463dd0131784e0ab59c2e4e5a620c61de6f5f31a20333987" gracePeriod=2 Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.722009 4791 generic.go:334] "Generic (PLEG): container finished" podID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerID="791b6f56bda2c2fc463dd0131784e0ab59c2e4e5a620c61de6f5f31a20333987" exitCode=0 Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.723107 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpd57" event={"ID":"da2f49ee-105d-43d7-82f3-4735f1693e90","Type":"ContainerDied","Data":"791b6f56bda2c2fc463dd0131784e0ab59c2e4e5a620c61de6f5f31a20333987"} Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.800468 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.974956 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-utilities\") pod \"da2f49ee-105d-43d7-82f3-4735f1693e90\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.975036 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rtmb\" (UniqueName: \"kubernetes.io/projected/da2f49ee-105d-43d7-82f3-4735f1693e90-kube-api-access-8rtmb\") pod \"da2f49ee-105d-43d7-82f3-4735f1693e90\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.975068 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-catalog-content\") pod \"da2f49ee-105d-43d7-82f3-4735f1693e90\" (UID: \"da2f49ee-105d-43d7-82f3-4735f1693e90\") " Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.980976 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-utilities" (OuterVolumeSpecName: "utilities") pod "da2f49ee-105d-43d7-82f3-4735f1693e90" (UID: "da2f49ee-105d-43d7-82f3-4735f1693e90"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:13:44 crc kubenswrapper[4791]: I1007 00:13:44.990253 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da2f49ee-105d-43d7-82f3-4735f1693e90-kube-api-access-8rtmb" (OuterVolumeSpecName: "kube-api-access-8rtmb") pod "da2f49ee-105d-43d7-82f3-4735f1693e90" (UID: "da2f49ee-105d-43d7-82f3-4735f1693e90"). InnerVolumeSpecName "kube-api-access-8rtmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.026300 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da2f49ee-105d-43d7-82f3-4735f1693e90" (UID: "da2f49ee-105d-43d7-82f3-4735f1693e90"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.076837 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.076908 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rtmb\" (UniqueName: \"kubernetes.io/projected/da2f49ee-105d-43d7-82f3-4735f1693e90-kube-api-access-8rtmb\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.076928 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2f49ee-105d-43d7-82f3-4735f1693e90-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.733991 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpd57" event={"ID":"da2f49ee-105d-43d7-82f3-4735f1693e90","Type":"ContainerDied","Data":"fd5ca0e31867a32323699588a4174a2075004f4ba6b9f154e3c1be0288d0cc2d"} Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.734057 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mpd57" Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.735771 4791 scope.go:117] "RemoveContainer" containerID="791b6f56bda2c2fc463dd0131784e0ab59c2e4e5a620c61de6f5f31a20333987" Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.759632 4791 scope.go:117] "RemoveContainer" containerID="9b958fc5fec4a28cea2713d565d593ebab0aed98d315d63ddbd8986afa6767ca" Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.777800 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mpd57"] Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.780645 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mpd57"] Oct 07 00:13:45 crc kubenswrapper[4791]: I1007 00:13:45.792169 4791 scope.go:117] "RemoveContainer" containerID="689d39e269fe57ecbedd9380b2b04c09bf0dc911c1146573eb8f80c517d977d7" Oct 07 00:13:46 crc kubenswrapper[4791]: I1007 00:13:46.076955 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" path="/var/lib/kubelet/pods/da2f49ee-105d-43d7-82f3-4735f1693e90/volumes" Oct 07 00:13:51 crc kubenswrapper[4791]: I1007 00:13:51.839160 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:13:51 crc kubenswrapper[4791]: I1007 00:13:51.891321 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:13:52 crc kubenswrapper[4791]: I1007 00:13:52.462495 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:13:52 crc kubenswrapper[4791]: I1007 00:13:52.506990 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:13:52 crc kubenswrapper[4791]: I1007 00:13:52.864482 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k2bmb"] Oct 07 00:13:53 crc kubenswrapper[4791]: I1007 00:13:53.705948 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:13:53 crc kubenswrapper[4791]: I1007 00:13:53.795358 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k2bmb" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="registry-server" containerID="cri-o://7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa" gracePeriod=2 Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.031832 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.181823 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.305557 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-catalog-content\") pod \"adc81096-c228-44a9-ad8d-befc0fd73127\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.305642 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzwdf\" (UniqueName: \"kubernetes.io/projected/adc81096-c228-44a9-ad8d-befc0fd73127-kube-api-access-zzwdf\") pod \"adc81096-c228-44a9-ad8d-befc0fd73127\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.305687 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-utilities\") pod \"adc81096-c228-44a9-ad8d-befc0fd73127\" (UID: \"adc81096-c228-44a9-ad8d-befc0fd73127\") " Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.306807 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-utilities" (OuterVolumeSpecName: "utilities") pod "adc81096-c228-44a9-ad8d-befc0fd73127" (UID: "adc81096-c228-44a9-ad8d-befc0fd73127"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.314950 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adc81096-c228-44a9-ad8d-befc0fd73127-kube-api-access-zzwdf" (OuterVolumeSpecName: "kube-api-access-zzwdf") pod "adc81096-c228-44a9-ad8d-befc0fd73127" (UID: "adc81096-c228-44a9-ad8d-befc0fd73127"). InnerVolumeSpecName "kube-api-access-zzwdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.350326 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "adc81096-c228-44a9-ad8d-befc0fd73127" (UID: "adc81096-c228-44a9-ad8d-befc0fd73127"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.407107 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzwdf\" (UniqueName: \"kubernetes.io/projected/adc81096-c228-44a9-ad8d-befc0fd73127-kube-api-access-zzwdf\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.407141 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.407154 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc81096-c228-44a9-ad8d-befc0fd73127-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.802760 4791 generic.go:334] "Generic (PLEG): container finished" podID="adc81096-c228-44a9-ad8d-befc0fd73127" containerID="7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa" exitCode=0 Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.802806 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bmb" event={"ID":"adc81096-c228-44a9-ad8d-befc0fd73127","Type":"ContainerDied","Data":"7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa"} Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.802838 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k2bmb" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.802867 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bmb" event={"ID":"adc81096-c228-44a9-ad8d-befc0fd73127","Type":"ContainerDied","Data":"9b38ad415729cc054a81fbff810803f12b205d7679e6eeb01b3d3e8a89020e0c"} Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.802886 4791 scope.go:117] "RemoveContainer" containerID="7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.829061 4791 scope.go:117] "RemoveContainer" containerID="cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.835291 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k2bmb"] Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.838325 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k2bmb"] Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.855697 4791 scope.go:117] "RemoveContainer" containerID="aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.883722 4791 scope.go:117] "RemoveContainer" containerID="7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa" Oct 07 00:13:54 crc kubenswrapper[4791]: E1007 00:13:54.884606 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa\": container with ID starting with 7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa not found: ID does not exist" containerID="7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.884678 
4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa"} err="failed to get container status \"7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa\": rpc error: code = NotFound desc = could not find container \"7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa\": container with ID starting with 7a62a8cd1aea4fb7207d0e96979f05a1d935265f0a3871cb5ad1fb4e3c1c17fa not found: ID does not exist" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.884720 4791 scope.go:117] "RemoveContainer" containerID="cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9" Oct 07 00:13:54 crc kubenswrapper[4791]: E1007 00:13:54.885375 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9\": container with ID starting with cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9 not found: ID does not exist" containerID="cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.885511 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9"} err="failed to get container status \"cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9\": rpc error: code = NotFound desc = could not find container \"cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9\": container with ID starting with cdb39dd2b5b70d365f13438e5b27fbedc73ff3efd8d3991094ca2833e85865c9 not found: ID does not exist" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.885655 4791 scope.go:117] "RemoveContainer" containerID="aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833" Oct 07 00:13:54 crc kubenswrapper[4791]: E1007 00:13:54.886036 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833\": container with ID starting with aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833 not found: ID does not exist" containerID="aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833" Oct 07 00:13:54 crc kubenswrapper[4791]: I1007 00:13:54.886070 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833"} err="failed to get container status \"aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833\": rpc error: code = NotFound desc = could not find container \"aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833\": container with ID starting with aefb0d125b5e21b6bcdc88da32889bb7f7c99f3a0a8aa1fa3f7a55264bb53833 not found: ID does not exist" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.066942 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcgrr"] Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.067472 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tcgrr" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerName="registry-server" containerID="cri-o://ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017" gracePeriod=2 Oct 07 
00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.082238 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" path="/var/lib/kubelet/pods/adc81096-c228-44a9-ad8d-befc0fd73127/volumes" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.467251 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.640427 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-catalog-content\") pod \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.640545 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b44vl\" (UniqueName: \"kubernetes.io/projected/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-kube-api-access-b44vl\") pod \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.640612 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-utilities\") pod \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\" (UID: \"5eddb7f9-673c-452e-9b7d-ac1462bb0c66\") " Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.641801 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-utilities" (OuterVolumeSpecName: "utilities") pod "5eddb7f9-673c-452e-9b7d-ac1462bb0c66" (UID: "5eddb7f9-673c-452e-9b7d-ac1462bb0c66"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.647887 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-kube-api-access-b44vl" (OuterVolumeSpecName: "kube-api-access-b44vl") pod "5eddb7f9-673c-452e-9b7d-ac1462bb0c66" (UID: "5eddb7f9-673c-452e-9b7d-ac1462bb0c66"). InnerVolumeSpecName "kube-api-access-b44vl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.660162 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5eddb7f9-673c-452e-9b7d-ac1462bb0c66" (UID: "5eddb7f9-673c-452e-9b7d-ac1462bb0c66"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.742694 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.742796 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.742815 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b44vl\" (UniqueName: \"kubernetes.io/projected/5eddb7f9-673c-452e-9b7d-ac1462bb0c66-kube-api-access-b44vl\") on node \"crc\" DevicePath \"\"" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.819242 4791 generic.go:334] "Generic (PLEG): container finished" podID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerID="ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017" exitCode=0 Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.819298 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcgrr" event={"ID":"5eddb7f9-673c-452e-9b7d-ac1462bb0c66","Type":"ContainerDied","Data":"ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017"} Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.819321 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcgrr" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.819358 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcgrr" event={"ID":"5eddb7f9-673c-452e-9b7d-ac1462bb0c66","Type":"ContainerDied","Data":"501bb7b023e4b1da42c1fb5d4c1c93f64b4b43bdcebce20143f5afd603d364d5"} Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.819382 4791 scope.go:117] "RemoveContainer" containerID="ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.840121 4791 scope.go:117] "RemoveContainer" containerID="226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.850255 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcgrr"] Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.854181 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcgrr"] Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.860737 4791 scope.go:117] "RemoveContainer" containerID="58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.873470 4791 scope.go:117] "RemoveContainer" containerID="ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017" Oct 07 00:13:56 crc kubenswrapper[4791]: E1007 00:13:56.873771 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017\": container with ID starting with ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017 not found: ID does not exist" containerID="ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.873820 4791 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017"} err="failed to get container status \"ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017\": rpc error: code = NotFound desc = could not find container \"ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017\": container with ID starting with ada386785a8d15c6ddcb12cf6349a1a58e24720f6df6032f1c33eef316dbb017 not found: ID does not exist" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.873851 4791 scope.go:117] "RemoveContainer" containerID="226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f" Oct 07 00:13:56 crc kubenswrapper[4791]: E1007 00:13:56.874128 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f\": container with ID starting with 226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f not found: ID does not exist" containerID="226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.874162 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f"} err="failed to get container status \"226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f\": rpc error: code = NotFound desc = could not find container \"226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f\": container with ID starting with 226a9b436fa4660b1e4b4a02c75e453dc4954be27b5c8231dc7ca544ea22835f not found: ID does not exist" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.874183 4791 scope.go:117] "RemoveContainer" containerID="58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c" Oct 07 00:13:56 crc kubenswrapper[4791]: E1007 00:13:56.874462 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c\": container with ID starting with 58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c not found: ID does not exist" containerID="58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c" Oct 07 00:13:56 crc kubenswrapper[4791]: I1007 00:13:56.874496 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c"} err="failed to get container status \"58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c\": rpc error: code = NotFound desc = could not find container \"58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c\": container with ID starting with 58475021ab279a8506072e2c9bfb0040f949343f4f52ca077b74c50de71cdf5c not found: ID does not exist" Oct 07 00:13:58 crc kubenswrapper[4791]: I1007 00:13:58.083367 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" path="/var/lib/kubelet/pods/5eddb7f9-673c-452e-9b7d-ac1462bb0c66/volumes" Oct 07 00:14:06 crc kubenswrapper[4791]: I1007 00:14:06.937947 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" podUID="1aa3f88f-af84-47d7-84cd-0a195a373a57" containerName="oauth-openshift" 
containerID="cri-o://b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada" gracePeriod=15 Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.284393 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.331571 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-54b5875b97-pq5fj"] Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.331849 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5276b6f-3881-45d2-92a0-1728a3231a09" containerName="pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.331869 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5276b6f-3881-45d2-92a0-1728a3231a09" containerName="pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.331883 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerName="extract-utilities" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.331892 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerName="extract-utilities" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.331904 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerName="extract-content" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.331913 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerName="extract-content" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.331924 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce732612-3842-4023-a2ff-fce88ab94972" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.331931 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce732612-3842-4023-a2ff-fce88ab94972" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.331941 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.331950 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.331961 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20a4713c-cab0-4783-951b-1607d1d64c1d" containerName="collect-profiles" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.331968 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="20a4713c-cab0-4783-951b-1607d1d64c1d" containerName="collect-profiles" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.331980 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d7e250b-7e86-4989-986a-3f01c8ea7144" containerName="pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.331987 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d7e250b-7e86-4989-986a-3f01c8ea7144" containerName="pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.331998 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aa3f88f-af84-47d7-84cd-0a195a373a57" containerName="oauth-openshift" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332006 4791 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="1aa3f88f-af84-47d7-84cd-0a195a373a57" containerName="oauth-openshift" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332016 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332023 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332032 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce732612-3842-4023-a2ff-fce88ab94972" containerName="extract-utilities" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332040 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce732612-3842-4023-a2ff-fce88ab94972" containerName="extract-utilities" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332056 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="extract-content" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332067 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="extract-content" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332078 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerName="extract-utilities" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332085 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerName="extract-utilities" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332095 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce732612-3842-4023-a2ff-fce88ab94972" containerName="extract-content" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332101 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce732612-3842-4023-a2ff-fce88ab94972" containerName="extract-content" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332109 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="extract-utilities" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332121 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="extract-utilities" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332134 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332142 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332156 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1fa4d9-9173-47fd-bc14-68317d5adfa4" containerName="image-pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332162 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1fa4d9-9173-47fd-bc14-68317d5adfa4" containerName="image-pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.332171 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerName="extract-content" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332178 4791 
state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerName="extract-content" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332296 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a1fa4d9-9173-47fd-bc14-68317d5adfa4" containerName="image-pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332315 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5276b6f-3881-45d2-92a0-1728a3231a09" containerName="pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332328 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="da2f49ee-105d-43d7-82f3-4735f1693e90" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332339 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="20a4713c-cab0-4783-951b-1607d1d64c1d" containerName="collect-profiles" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332349 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d7e250b-7e86-4989-986a-3f01c8ea7144" containerName="pruner" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332357 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="1aa3f88f-af84-47d7-84cd-0a195a373a57" containerName="oauth-openshift" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332367 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce732612-3842-4023-a2ff-fce88ab94972" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332377 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddb7f9-673c-452e-9b7d-ac1462bb0c66" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332386 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="adc81096-c228-44a9-ad8d-befc0fd73127" containerName="registry-server" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.332906 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.347583 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-54b5875b97-pq5fj"] Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382097 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-serving-cert\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382187 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-policies\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382222 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-login\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382251 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-router-certs\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382285 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-provider-selection\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382315 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-idp-0-file-data\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382347 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-ocp-branding-template\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382392 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-error\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382472 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-service-ca\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382501 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-dir\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382554 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxdhv\" (UniqueName: \"kubernetes.io/projected/1aa3f88f-af84-47d7-84cd-0a195a373a57-kube-api-access-sxdhv\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382590 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-cliconfig\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382623 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-session\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.382731 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-trusted-ca-bundle\") pod \"1aa3f88f-af84-47d7-84cd-0a195a373a57\" (UID: \"1aa3f88f-af84-47d7-84cd-0a195a373a57\") " Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.384188 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.385048 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.385121 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.385450 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.385487 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.391360 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1aa3f88f-af84-47d7-84cd-0a195a373a57-kube-api-access-sxdhv" (OuterVolumeSpecName: "kube-api-access-sxdhv") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "kube-api-access-sxdhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.396690 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.397000 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.397297 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.397532 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.405850 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.406108 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.406933 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.407536 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "1aa3f88f-af84-47d7-84cd-0a195a373a57" (UID: "1aa3f88f-af84-47d7-84cd-0a195a373a57"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485338 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485390 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485430 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485464 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485493 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485606 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485641 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485678 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485703 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-session\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.485957 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486045 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-audit-dir\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486080 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glvpm\" (UniqueName: \"kubernetes.io/projected/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-kube-api-access-glvpm\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486115 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486147 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-audit-policies\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486446 4791 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486501 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxdhv\" (UniqueName: \"kubernetes.io/projected/1aa3f88f-af84-47d7-84cd-0a195a373a57-kube-api-access-sxdhv\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486519 4791 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486532 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486550 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486563 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486579 4791 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486593 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486604 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486631 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486647 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486660 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486672 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.486684 4791 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1aa3f88f-af84-47d7-84cd-0a195a373a57-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:07 crc 
kubenswrapper[4791]: I1007 00:14:07.587813 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.587883 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.587928 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.587963 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-session\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588002 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-audit-dir\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588028 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588058 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glvpm\" (UniqueName: \"kubernetes.io/projected/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-kube-api-access-glvpm\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588089 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 
00:14:07.588115 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-audit-policies\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588159 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588184 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588212 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588244 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588281 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588768 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-audit-dir\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.588942 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.589439 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-audit-policies\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.590095 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.591552 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.592150 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.592201 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.592807 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.592912 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-session\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.594210 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.594735 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.600298 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.600954 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.610999 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glvpm\" (UniqueName: \"kubernetes.io/projected/3e8d4a97-136f-4512-aff4-7f4ccf76bf8a-kube-api-access-glvpm\") pod \"oauth-openshift-54b5875b97-pq5fj\" (UID: \"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a\") " pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.649995 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.894190 4791 generic.go:334] "Generic (PLEG): container finished" podID="1aa3f88f-af84-47d7-84cd-0a195a373a57" containerID="b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada" exitCode=0 Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.894263 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" event={"ID":"1aa3f88f-af84-47d7-84cd-0a195a373a57","Type":"ContainerDied","Data":"b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada"} Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.894307 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" event={"ID":"1aa3f88f-af84-47d7-84cd-0a195a373a57","Type":"ContainerDied","Data":"1ae9a28714efb7ad3ff774aaf6bc3d998d15f9efddde55212ce1dc7400cf803a"} Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.894338 4791 scope.go:117] "RemoveContainer" containerID="b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.894569 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pvkht" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.904565 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-54b5875b97-pq5fj"] Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.925189 4791 scope.go:117] "RemoveContainer" containerID="b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada" Oct 07 00:14:07 crc kubenswrapper[4791]: E1007 00:14:07.925803 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada\": container with ID starting with b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada not found: ID does not exist" containerID="b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.925859 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada"} err="failed to get container status \"b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada\": rpc error: code = NotFound desc = could not find container \"b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada\": container with ID starting with b6dedb9db3b689d52bbf99d733366cae89ca3eb0d9cf7c8813becb11d8ac1ada not found: ID does not exist" Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.937292 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pvkht"] Oct 07 00:14:07 crc kubenswrapper[4791]: I1007 00:14:07.942320 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pvkht"] Oct 07 00:14:08 crc kubenswrapper[4791]: I1007 00:14:08.081117 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1aa3f88f-af84-47d7-84cd-0a195a373a57" path="/var/lib/kubelet/pods/1aa3f88f-af84-47d7-84cd-0a195a373a57/volumes" Oct 07 00:14:08 crc kubenswrapper[4791]: I1007 00:14:08.904496 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" event={"ID":"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a","Type":"ContainerStarted","Data":"b97539492352ff874379e81bf74d88d3cdc94e6b7b2aa2732cccf25ef0c2e261"} Oct 07 00:14:08 crc kubenswrapper[4791]: I1007 00:14:08.904580 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" event={"ID":"3e8d4a97-136f-4512-aff4-7f4ccf76bf8a","Type":"ContainerStarted","Data":"689ebd3a2154c70f0340b25bddd6939cee7619875329ec15126b259a862ca1e7"} Oct 07 00:14:08 crc kubenswrapper[4791]: I1007 00:14:08.905040 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:08 crc kubenswrapper[4791]: I1007 00:14:08.912175 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" Oct 07 00:14:08 crc kubenswrapper[4791]: I1007 00:14:08.935890 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-54b5875b97-pq5fj" podStartSLOduration=27.935868172 podStartE2EDuration="27.935868172s" podCreationTimestamp="2025-10-07 00:13:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:14:08.933067307 +0000 UTC m=+175.529004958" watchObservedRunningTime="2025-10-07 00:14:08.935868172 +0000 UTC m=+175.531805823" Oct 07 00:14:11 crc kubenswrapper[4791]: I1007 00:14:11.272898 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 00:14:11 crc kubenswrapper[4791]: I1007 00:14:11.601143 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:14:11 crc kubenswrapper[4791]: I1007 00:14:11.601218 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.549351 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t7lmb"] Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.550334 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t7lmb" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="registry-server" containerID="cri-o://647931cbfed2325b0274373a146567bbc12e4a0e9a3a0458657695355effd5f0" gracePeriod=30 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.565131 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pmlfq"] Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.565483 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pmlfq" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerName="registry-server" containerID="cri-o://b9eb3d915919d1983661fbd87a80781332a0779a3ef98b062ae3f5e73e9a955d" gracePeriod=30 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.589736 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v5pt7"] Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.590533 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" podUID="82a62ffc-29d8-4597-a18c-6e13dbd2cce3" containerName="marketplace-operator" containerID="cri-o://ba745c5c3cbf03040a5667e3ba681c9fa91a1db9bc39bb183444bb607f6a45b3" gracePeriod=30 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.616920 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxlpb"] Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.617201 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vxlpb" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerName="registry-server" containerID="cri-o://371969781e05f6a41750d7a44ba01e25c7b374bcc9c57b0232007c992af5d575" gracePeriod=30 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.648736 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l96f5"] Oct 07 
00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.649087 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l96f5" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerName="registry-server" containerID="cri-o://d1426db41038678bacb452f8c9828aac6a729090c39af5eee5285b750f78112d" gracePeriod=30 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.655201 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s4l9q"] Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.655905 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.666831 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s4l9q"] Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.771897 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xhrl\" (UniqueName: \"kubernetes.io/projected/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-kube-api-access-5xhrl\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.771970 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.772036 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.874226 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.874331 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.874369 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xhrl\" (UniqueName: \"kubernetes.io/projected/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-kube-api-access-5xhrl\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.881361 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.893434 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.897816 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xhrl\" (UniqueName: \"kubernetes.io/projected/9c998b5e-063d-4cb7-8eeb-f479d11a11ba-kube-api-access-5xhrl\") pod \"marketplace-operator-79b997595-s4l9q\" (UID: \"9c998b5e-063d-4cb7-8eeb-f479d11a11ba\") " pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.974696 4791 generic.go:334] "Generic (PLEG): container finished" podID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerID="647931cbfed2325b0274373a146567bbc12e4a0e9a3a0458657695355effd5f0" exitCode=0 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.974820 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7lmb" event={"ID":"969bde5b-aa90-48e4-9352-76feaaabdd8e","Type":"ContainerDied","Data":"647931cbfed2325b0274373a146567bbc12e4a0e9a3a0458657695355effd5f0"} Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.978557 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.980802 4791 generic.go:334] "Generic (PLEG): container finished" podID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerID="d1426db41038678bacb452f8c9828aac6a729090c39af5eee5285b750f78112d" exitCode=0 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.980904 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l96f5" event={"ID":"b2c2e58d-292d-4116-ac44-c02b2f60a742","Type":"ContainerDied","Data":"d1426db41038678bacb452f8c9828aac6a729090c39af5eee5285b750f78112d"} Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.984916 4791 generic.go:334] "Generic (PLEG): container finished" podID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerID="b9eb3d915919d1983661fbd87a80781332a0779a3ef98b062ae3f5e73e9a955d" exitCode=0 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.985006 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmlfq" event={"ID":"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5","Type":"ContainerDied","Data":"b9eb3d915919d1983661fbd87a80781332a0779a3ef98b062ae3f5e73e9a955d"} Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.988459 4791 generic.go:334] "Generic (PLEG): container finished" podID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerID="371969781e05f6a41750d7a44ba01e25c7b374bcc9c57b0232007c992af5d575" exitCode=0 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.988520 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxlpb" event={"ID":"aea8c166-6a54-4361-8dd8-49acde45cad2","Type":"ContainerDied","Data":"371969781e05f6a41750d7a44ba01e25c7b374bcc9c57b0232007c992af5d575"} Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.990920 4791 generic.go:334] "Generic (PLEG): container finished" podID="82a62ffc-29d8-4597-a18c-6e13dbd2cce3" containerID="ba745c5c3cbf03040a5667e3ba681c9fa91a1db9bc39bb183444bb607f6a45b3" exitCode=0 Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.990945 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" event={"ID":"82a62ffc-29d8-4597-a18c-6e13dbd2cce3","Type":"ContainerDied","Data":"ba745c5c3cbf03040a5667e3ba681c9fa91a1db9bc39bb183444bb607f6a45b3"} Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.990963 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" event={"ID":"82a62ffc-29d8-4597-a18c-6e13dbd2cce3","Type":"ContainerDied","Data":"d87215121109a24119a6199338d27ae221d7228b8cd63792052870f085dedf91"} Oct 07 00:14:19 crc kubenswrapper[4791]: I1007 00:14:19.990976 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d87215121109a24119a6199338d27ae221d7228b8cd63792052870f085dedf91" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.101620 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.112081 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.132614 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.136935 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.183533 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284124 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-catalog-content\") pod \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284206 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-utilities\") pod \"969bde5b-aa90-48e4-9352-76feaaabdd8e\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284262 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-catalog-content\") pod \"aea8c166-6a54-4361-8dd8-49acde45cad2\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284300 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmbs2\" (UniqueName: \"kubernetes.io/projected/b2c2e58d-292d-4116-ac44-c02b2f60a742-kube-api-access-wmbs2\") pod \"b2c2e58d-292d-4116-ac44-c02b2f60a742\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284345 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8tdf\" (UniqueName: \"kubernetes.io/projected/aea8c166-6a54-4361-8dd8-49acde45cad2-kube-api-access-n8tdf\") pod \"aea8c166-6a54-4361-8dd8-49acde45cad2\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284376 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-utilities\") pod \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284427 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-catalog-content\") pod \"969bde5b-aa90-48e4-9352-76feaaabdd8e\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284451 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-utilities\") pod \"aea8c166-6a54-4361-8dd8-49acde45cad2\" (UID: \"aea8c166-6a54-4361-8dd8-49acde45cad2\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284478 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-trusted-ca\") pod \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284500 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7cjc\" (UniqueName: \"kubernetes.io/projected/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-kube-api-access-t7cjc\") pod \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\" (UID: \"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284524 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-utilities\") pod \"b2c2e58d-292d-4116-ac44-c02b2f60a742\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284567 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-operator-metrics\") pod \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284594 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjfcj\" (UniqueName: \"kubernetes.io/projected/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-kube-api-access-wjfcj\") pod \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\" (UID: \"82a62ffc-29d8-4597-a18c-6e13dbd2cce3\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284616 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpd86\" (UniqueName: \"kubernetes.io/projected/969bde5b-aa90-48e4-9352-76feaaabdd8e-kube-api-access-kpd86\") pod \"969bde5b-aa90-48e4-9352-76feaaabdd8e\" (UID: \"969bde5b-aa90-48e4-9352-76feaaabdd8e\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.284642 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-catalog-content\") pod \"b2c2e58d-292d-4116-ac44-c02b2f60a742\" (UID: \"b2c2e58d-292d-4116-ac44-c02b2f60a742\") " Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.285736 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-utilities" (OuterVolumeSpecName: "utilities") pod "b2c2e58d-292d-4116-ac44-c02b2f60a742" (UID: "b2c2e58d-292d-4116-ac44-c02b2f60a742"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.285798 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-utilities" (OuterVolumeSpecName: "utilities") pod "aea8c166-6a54-4361-8dd8-49acde45cad2" (UID: "aea8c166-6a54-4361-8dd8-49acde45cad2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.286897 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-utilities" (OuterVolumeSpecName: "utilities") pod "969bde5b-aa90-48e4-9352-76feaaabdd8e" (UID: "969bde5b-aa90-48e4-9352-76feaaabdd8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.286979 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "82a62ffc-29d8-4597-a18c-6e13dbd2cce3" (UID: "82a62ffc-29d8-4597-a18c-6e13dbd2cce3"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.291620 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-utilities" (OuterVolumeSpecName: "utilities") pod "e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" (UID: "e07ccf4f-8fc8-411f-a91d-804f9b82c1d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.292193 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aea8c166-6a54-4361-8dd8-49acde45cad2-kube-api-access-n8tdf" (OuterVolumeSpecName: "kube-api-access-n8tdf") pod "aea8c166-6a54-4361-8dd8-49acde45cad2" (UID: "aea8c166-6a54-4361-8dd8-49acde45cad2"). InnerVolumeSpecName "kube-api-access-n8tdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.293069 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-kube-api-access-t7cjc" (OuterVolumeSpecName: "kube-api-access-t7cjc") pod "e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" (UID: "e07ccf4f-8fc8-411f-a91d-804f9b82c1d5"). InnerVolumeSpecName "kube-api-access-t7cjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.293352 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/969bde5b-aa90-48e4-9352-76feaaabdd8e-kube-api-access-kpd86" (OuterVolumeSpecName: "kube-api-access-kpd86") pod "969bde5b-aa90-48e4-9352-76feaaabdd8e" (UID: "969bde5b-aa90-48e4-9352-76feaaabdd8e"). InnerVolumeSpecName "kube-api-access-kpd86". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.293414 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "82a62ffc-29d8-4597-a18c-6e13dbd2cce3" (UID: "82a62ffc-29d8-4597-a18c-6e13dbd2cce3"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.294447 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2c2e58d-292d-4116-ac44-c02b2f60a742-kube-api-access-wmbs2" (OuterVolumeSpecName: "kube-api-access-wmbs2") pod "b2c2e58d-292d-4116-ac44-c02b2f60a742" (UID: "b2c2e58d-292d-4116-ac44-c02b2f60a742"). InnerVolumeSpecName "kube-api-access-wmbs2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.300070 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-kube-api-access-wjfcj" (OuterVolumeSpecName: "kube-api-access-wjfcj") pod "82a62ffc-29d8-4597-a18c-6e13dbd2cce3" (UID: "82a62ffc-29d8-4597-a18c-6e13dbd2cce3"). InnerVolumeSpecName "kube-api-access-wjfcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.311934 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aea8c166-6a54-4361-8dd8-49acde45cad2" (UID: "aea8c166-6a54-4361-8dd8-49acde45cad2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.359250 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "969bde5b-aa90-48e4-9352-76feaaabdd8e" (UID: "969bde5b-aa90-48e4-9352-76feaaabdd8e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.360764 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" (UID: "e07ccf4f-8fc8-411f-a91d-804f9b82c1d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386575 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386609 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmbs2\" (UniqueName: \"kubernetes.io/projected/b2c2e58d-292d-4116-ac44-c02b2f60a742-kube-api-access-wmbs2\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386622 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8tdf\" (UniqueName: \"kubernetes.io/projected/aea8c166-6a54-4361-8dd8-49acde45cad2-kube-api-access-n8tdf\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386634 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386645 4791 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386654 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386663 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea8c166-6a54-4361-8dd8-49acde45cad2-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386671 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7cjc\" (UniqueName: \"kubernetes.io/projected/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-kube-api-access-t7cjc\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386679 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386688 4791 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386699 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjfcj\" (UniqueName: \"kubernetes.io/projected/82a62ffc-29d8-4597-a18c-6e13dbd2cce3-kube-api-access-wjfcj\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386709 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpd86\" (UniqueName: \"kubernetes.io/projected/969bde5b-aa90-48e4-9352-76feaaabdd8e-kube-api-access-kpd86\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386719 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.386729 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969bde5b-aa90-48e4-9352-76feaaabdd8e-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.393038 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2c2e58d-292d-4116-ac44-c02b2f60a742" (UID: "b2c2e58d-292d-4116-ac44-c02b2f60a742"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.457721 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s4l9q"] Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.488537 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2c2e58d-292d-4116-ac44-c02b2f60a742-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.998119 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" event={"ID":"9c998b5e-063d-4cb7-8eeb-f479d11a11ba","Type":"ContainerStarted","Data":"684cb7a94b4f36077977e619eae3dd5ff953b850be35d1ac03d36ea4c77176f6"} Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.998788 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" event={"ID":"9c998b5e-063d-4cb7-8eeb-f479d11a11ba","Type":"ContainerStarted","Data":"7866eb91fee4bfee4e708b6cb8dafaac1b226e1142f60f471207dee502083a6e"} Oct 07 00:14:20 crc kubenswrapper[4791]: I1007 00:14:20.998841 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.002088 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7lmb" event={"ID":"969bde5b-aa90-48e4-9352-76feaaabdd8e","Type":"ContainerDied","Data":"7e4d9697b9ac2b8faa7b877e10bdfc733477ba673f50e4807a8ef7347898279d"} Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.002307 4791 scope.go:117] "RemoveContainer" containerID="647931cbfed2325b0274373a146567bbc12e4a0e9a3a0458657695355effd5f0" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.002574 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t7lmb" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.003672 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.006826 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l96f5" event={"ID":"b2c2e58d-292d-4116-ac44-c02b2f60a742","Type":"ContainerDied","Data":"b02f6801c7d5e97f51db5eb021f8605360a17172968a2ae7dd4fca29b0229f7d"} Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.006851 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l96f5" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.009418 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pmlfq" event={"ID":"e07ccf4f-8fc8-411f-a91d-804f9b82c1d5","Type":"ContainerDied","Data":"ebc801ffe4e401628adcc64444513c1a26feb712d50c0ed763fc33df9318cc09"} Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.009569 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pmlfq" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.012905 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-v5pt7" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.021974 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vxlpb" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.022886 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxlpb" event={"ID":"aea8c166-6a54-4361-8dd8-49acde45cad2","Type":"ContainerDied","Data":"aef6fd06f59c689d9707819e94d2ad0e477b63f992c24ddf0747dab16ffb79fe"} Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.030236 4791 scope.go:117] "RemoveContainer" containerID="68882cd24ef0f4109264884180de873aed4336a06c1a6a16d81a0457b129e246" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.060503 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-s4l9q" podStartSLOduration=2.060475377 podStartE2EDuration="2.060475377s" podCreationTimestamp="2025-10-07 00:14:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:14:21.022711596 +0000 UTC m=+187.618649267" watchObservedRunningTime="2025-10-07 00:14:21.060475377 +0000 UTC m=+187.656413028" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.070280 4791 scope.go:117] "RemoveContainer" containerID="abbfe5fedbe2d59ec89782a4c9674ce9ec3aa428ecfd904162742baf57367276" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.085707 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t7lmb"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.090519 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t7lmb"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.098045 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l96f5"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.105173 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l96f5"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.111205 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v5pt7"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.112249 4791 scope.go:117] "RemoveContainer" containerID="d1426db41038678bacb452f8c9828aac6a729090c39af5eee5285b750f78112d" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.116350 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v5pt7"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 
00:14:21.126993 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxlpb"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.130362 4791 scope.go:117] "RemoveContainer" containerID="749c24599ed39a6ceb291bba9586e52654f183492d2f5ba0dd507c822e19ce8a" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.130717 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxlpb"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.134719 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pmlfq"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.137247 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pmlfq"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.152102 4791 scope.go:117] "RemoveContainer" containerID="f2598c09fab0bbcd96884a4e71cb3d9b211fc247b410b7a0ae247c619fb1e721" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.165542 4791 scope.go:117] "RemoveContainer" containerID="b9eb3d915919d1983661fbd87a80781332a0779a3ef98b062ae3f5e73e9a955d" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.180358 4791 scope.go:117] "RemoveContainer" containerID="f02e0c4dc32232b1a8addaf1547c12b5ac2301105ca7c8b80892c0fdb7b74b6d" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.194378 4791 scope.go:117] "RemoveContainer" containerID="ca6ffe3258922484a815b6e5c222be49113294dabc0b0f4bdb08212625f3aa88" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.209621 4791 scope.go:117] "RemoveContainer" containerID="371969781e05f6a41750d7a44ba01e25c7b374bcc9c57b0232007c992af5d575" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.224312 4791 scope.go:117] "RemoveContainer" containerID="7f773ec7e206cb8232d8d1c23c612be7e14f47c245dd4fc77588a848ec5a7bb4" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.238010 4791 scope.go:117] "RemoveContainer" containerID="22b0e7d861535822ecfe72d317af47439e746559986f57123b1b6f5333d622b2" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566142 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ztf96"] Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566457 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="extract-content" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566475 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="extract-content" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566490 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="extract-utilities" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566499 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="extract-utilities" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566515 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566523 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566537 4791 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566545 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566562 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerName="extract-utilities" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566573 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerName="extract-utilities" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566587 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566595 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566608 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerName="extract-content" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566618 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerName="extract-content" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566632 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566641 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566651 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerName="extract-content" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566660 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerName="extract-content" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566673 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerName="extract-utilities" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566681 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerName="extract-utilities" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566695 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82a62ffc-29d8-4597-a18c-6e13dbd2cce3" containerName="marketplace-operator" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566704 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="82a62ffc-29d8-4597-a18c-6e13dbd2cce3" containerName="marketplace-operator" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566715 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerName="extract-content" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566724 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerName="extract-content" Oct 07 00:14:21 crc kubenswrapper[4791]: E1007 00:14:21.566735 4791 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerName="extract-utilities" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566744 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerName="extract-utilities" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566862 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566878 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566901 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="82a62ffc-29d8-4597-a18c-6e13dbd2cce3" containerName="marketplace-operator" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566914 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.566928 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" containerName="registry-server" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.567993 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.570050 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.578755 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztf96"] Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.703706 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-utilities\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.703757 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-catalog-content\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.703824 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk484\" (UniqueName: \"kubernetes.io/projected/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-kube-api-access-wk484\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.805123 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-catalog-content\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 
00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.805224 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk484\" (UniqueName: \"kubernetes.io/projected/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-kube-api-access-wk484\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.805270 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-utilities\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.805764 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-catalog-content\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.805771 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-utilities\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.824459 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk484\" (UniqueName: \"kubernetes.io/projected/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-kube-api-access-wk484\") pod \"redhat-marketplace-ztf96\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:21 crc kubenswrapper[4791]: I1007 00:14:21.883812 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.085719 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82a62ffc-29d8-4597-a18c-6e13dbd2cce3" path="/var/lib/kubelet/pods/82a62ffc-29d8-4597-a18c-6e13dbd2cce3/volumes" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.086840 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="969bde5b-aa90-48e4-9352-76feaaabdd8e" path="/var/lib/kubelet/pods/969bde5b-aa90-48e4-9352-76feaaabdd8e/volumes" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.087777 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aea8c166-6a54-4361-8dd8-49acde45cad2" path="/var/lib/kubelet/pods/aea8c166-6a54-4361-8dd8-49acde45cad2/volumes" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.089166 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2c2e58d-292d-4116-ac44-c02b2f60a742" path="/var/lib/kubelet/pods/b2c2e58d-292d-4116-ac44-c02b2f60a742/volumes" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.090113 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e07ccf4f-8fc8-411f-a91d-804f9b82c1d5" path="/var/lib/kubelet/pods/e07ccf4f-8fc8-411f-a91d-804f9b82c1d5/volumes" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.127383 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztf96"] Oct 07 00:14:22 crc kubenswrapper[4791]: W1007 00:14:22.130741 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc7de000_f1f3_405f_b245_c1fbf2a23a6d.slice/crio-09c048df2fb91e53b628dc8f333c1a9a852ffe9dfc52bf5c6fb78dbed812dda3 WatchSource:0}: Error finding container 09c048df2fb91e53b628dc8f333c1a9a852ffe9dfc52bf5c6fb78dbed812dda3: Status 404 returned error can't find the container with id 09c048df2fb91e53b628dc8f333c1a9a852ffe9dfc52bf5c6fb78dbed812dda3 Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.172489 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m6x2v"] Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.175698 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.179987 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.181592 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m6x2v"] Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.311346 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-catalog-content\") pod \"certified-operators-m6x2v\" (UID: \"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.311770 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k56g6\" (UniqueName: \"kubernetes.io/projected/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-kube-api-access-k56g6\") pod \"certified-operators-m6x2v\" (UID: \"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.311797 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-utilities\") pod \"certified-operators-m6x2v\" (UID: \"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.412687 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-utilities\") pod \"certified-operators-m6x2v\" (UID: \"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.412756 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-catalog-content\") pod \"certified-operators-m6x2v\" (UID: \"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.412808 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k56g6\" (UniqueName: \"kubernetes.io/projected/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-kube-api-access-k56g6\") pod \"certified-operators-m6x2v\" (UID: \"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.413338 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-catalog-content\") pod \"certified-operators-m6x2v\" (UID: \"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.413364 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-utilities\") pod \"certified-operators-m6x2v\" (UID: 
\"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.433912 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k56g6\" (UniqueName: \"kubernetes.io/projected/f2100e2e-ec37-42cc-9e3e-d3bc94f7afec-kube-api-access-k56g6\") pod \"certified-operators-m6x2v\" (UID: \"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec\") " pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.532519 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:22 crc kubenswrapper[4791]: I1007 00:14:22.737766 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m6x2v"] Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.029495 4791 generic.go:334] "Generic (PLEG): container finished" podID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerID="a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad" exitCode=0 Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.029590 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztf96" event={"ID":"dc7de000-f1f3-405f-b245-c1fbf2a23a6d","Type":"ContainerDied","Data":"a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad"} Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.029625 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztf96" event={"ID":"dc7de000-f1f3-405f-b245-c1fbf2a23a6d","Type":"ContainerStarted","Data":"09c048df2fb91e53b628dc8f333c1a9a852ffe9dfc52bf5c6fb78dbed812dda3"} Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.031466 4791 generic.go:334] "Generic (PLEG): container finished" podID="f2100e2e-ec37-42cc-9e3e-d3bc94f7afec" containerID="adc8dcbdc16d0e6bfbb53e89904b96766c8e175f93af55989fd796b80d016243" exitCode=0 Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.031561 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6x2v" event={"ID":"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec","Type":"ContainerDied","Data":"adc8dcbdc16d0e6bfbb53e89904b96766c8e175f93af55989fd796b80d016243"} Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.031596 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6x2v" event={"ID":"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec","Type":"ContainerStarted","Data":"f531efd57cdd3535522dc7c484217f086298aa65dfb667ccd91b61b4313dbf40"} Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.963319 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-klxwq"] Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.964780 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.966524 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 07 00:14:23 crc kubenswrapper[4791]: I1007 00:14:23.977302 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-klxwq"] Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.040750 4791 generic.go:334] "Generic (PLEG): container finished" podID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerID="be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990" exitCode=0 Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.040832 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztf96" event={"ID":"dc7de000-f1f3-405f-b245-c1fbf2a23a6d","Type":"ContainerDied","Data":"be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990"} Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.043367 4791 generic.go:334] "Generic (PLEG): container finished" podID="f2100e2e-ec37-42cc-9e3e-d3bc94f7afec" containerID="ec32cfc12c688149c8030d09a7e9aa391a7ca5272afa82b786f9d95cc5152ce7" exitCode=0 Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.043415 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6x2v" event={"ID":"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec","Type":"ContainerDied","Data":"ec32cfc12c688149c8030d09a7e9aa391a7ca5272afa82b786f9d95cc5152ce7"} Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.133900 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/034480a5-7c4f-48ee-86ad-d358e746e74b-catalog-content\") pod \"redhat-operators-klxwq\" (UID: \"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.134040 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/034480a5-7c4f-48ee-86ad-d358e746e74b-utilities\") pod \"redhat-operators-klxwq\" (UID: \"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.134091 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c98dk\" (UniqueName: \"kubernetes.io/projected/034480a5-7c4f-48ee-86ad-d358e746e74b-kube-api-access-c98dk\") pod \"redhat-operators-klxwq\" (UID: \"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.235339 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c98dk\" (UniqueName: \"kubernetes.io/projected/034480a5-7c4f-48ee-86ad-d358e746e74b-kube-api-access-c98dk\") pod \"redhat-operators-klxwq\" (UID: \"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.236142 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/034480a5-7c4f-48ee-86ad-d358e746e74b-catalog-content\") pod \"redhat-operators-klxwq\" (UID: 
\"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.236591 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/034480a5-7c4f-48ee-86ad-d358e746e74b-catalog-content\") pod \"redhat-operators-klxwq\" (UID: \"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.236689 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/034480a5-7c4f-48ee-86ad-d358e746e74b-utilities\") pod \"redhat-operators-klxwq\" (UID: \"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.237330 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/034480a5-7c4f-48ee-86ad-d358e746e74b-utilities\") pod \"redhat-operators-klxwq\" (UID: \"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.265100 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c98dk\" (UniqueName: \"kubernetes.io/projected/034480a5-7c4f-48ee-86ad-d358e746e74b-kube-api-access-c98dk\") pod \"redhat-operators-klxwq\" (UID: \"034480a5-7c4f-48ee-86ad-d358e746e74b\") " pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.294749 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.494777 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-klxwq"] Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.571112 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nlxvp"] Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.572580 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.575263 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nlxvp"] Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.579737 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.743977 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-catalog-content\") pod \"community-operators-nlxvp\" (UID: \"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.744769 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-utilities\") pod \"community-operators-nlxvp\" (UID: \"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.744857 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h55kc\" (UniqueName: \"kubernetes.io/projected/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-kube-api-access-h55kc\") pod \"community-operators-nlxvp\" (UID: \"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.845969 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-catalog-content\") pod \"community-operators-nlxvp\" (UID: \"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.846076 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-utilities\") pod \"community-operators-nlxvp\" (UID: \"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.846112 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h55kc\" (UniqueName: \"kubernetes.io/projected/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-kube-api-access-h55kc\") pod \"community-operators-nlxvp\" (UID: \"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.846592 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-utilities\") pod \"community-operators-nlxvp\" (UID: \"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.846860 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-catalog-content\") pod \"community-operators-nlxvp\" (UID: 
\"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.870698 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h55kc\" (UniqueName: \"kubernetes.io/projected/4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c-kube-api-access-h55kc\") pod \"community-operators-nlxvp\" (UID: \"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c\") " pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:24 crc kubenswrapper[4791]: I1007 00:14:24.898109 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:25 crc kubenswrapper[4791]: I1007 00:14:25.054145 4791 generic.go:334] "Generic (PLEG): container finished" podID="034480a5-7c4f-48ee-86ad-d358e746e74b" containerID="2a74852cddd624a150e081c4fa55823127533a4865e66c0abc09f2694bd580f2" exitCode=0 Oct 07 00:14:25 crc kubenswrapper[4791]: I1007 00:14:25.054241 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-klxwq" event={"ID":"034480a5-7c4f-48ee-86ad-d358e746e74b","Type":"ContainerDied","Data":"2a74852cddd624a150e081c4fa55823127533a4865e66c0abc09f2694bd580f2"} Oct 07 00:14:25 crc kubenswrapper[4791]: I1007 00:14:25.054716 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-klxwq" event={"ID":"034480a5-7c4f-48ee-86ad-d358e746e74b","Type":"ContainerStarted","Data":"4067f7b54843cfbe1827ff2f259ae42c3563eedc62fc54c4a5ffad21cc8d3e55"} Oct 07 00:14:25 crc kubenswrapper[4791]: I1007 00:14:25.065107 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztf96" event={"ID":"dc7de000-f1f3-405f-b245-c1fbf2a23a6d","Type":"ContainerStarted","Data":"244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d"} Oct 07 00:14:25 crc kubenswrapper[4791]: I1007 00:14:25.084828 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6x2v" event={"ID":"f2100e2e-ec37-42cc-9e3e-d3bc94f7afec","Type":"ContainerStarted","Data":"dda04ed2f443b0dff62ae047fc2d3367c44676b00251a318fa1337281f5d2aba"} Oct 07 00:14:25 crc kubenswrapper[4791]: I1007 00:14:25.110696 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ztf96" podStartSLOduration=2.70546131 podStartE2EDuration="4.110663105s" podCreationTimestamp="2025-10-07 00:14:21 +0000 UTC" firstStartedPulling="2025-10-07 00:14:23.030905051 +0000 UTC m=+189.626842712" lastFinishedPulling="2025-10-07 00:14:24.436106856 +0000 UTC m=+191.032044507" observedRunningTime="2025-10-07 00:14:25.104181669 +0000 UTC m=+191.700119330" watchObservedRunningTime="2025-10-07 00:14:25.110663105 +0000 UTC m=+191.706600756" Oct 07 00:14:25 crc kubenswrapper[4791]: I1007 00:14:25.135783 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m6x2v" podStartSLOduration=1.545929481 podStartE2EDuration="3.135764354s" podCreationTimestamp="2025-10-07 00:14:22 +0000 UTC" firstStartedPulling="2025-10-07 00:14:23.033635273 +0000 UTC m=+189.629572924" lastFinishedPulling="2025-10-07 00:14:24.623470146 +0000 UTC m=+191.219407797" observedRunningTime="2025-10-07 00:14:25.13267046 +0000 UTC m=+191.728608131" watchObservedRunningTime="2025-10-07 00:14:25.135764354 +0000 UTC m=+191.731702005" Oct 07 00:14:25 crc kubenswrapper[4791]: I1007 
00:14:25.139208 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nlxvp"] Oct 07 00:14:25 crc kubenswrapper[4791]: W1007 00:14:25.157724 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cc8cf38_a2c7_477d_81f6_d5f5a8130e1c.slice/crio-f8f4e7295038c42bafc49f78680dee4cf77ad0a2114a26bdb30997943d3976d3 WatchSource:0}: Error finding container f8f4e7295038c42bafc49f78680dee4cf77ad0a2114a26bdb30997943d3976d3: Status 404 returned error can't find the container with id f8f4e7295038c42bafc49f78680dee4cf77ad0a2114a26bdb30997943d3976d3 Oct 07 00:14:26 crc kubenswrapper[4791]: I1007 00:14:26.110902 4791 generic.go:334] "Generic (PLEG): container finished" podID="4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c" containerID="b4adb8d183a969d47a8388081e3fa470ebefc3bda1c3e08c418497d60fe0a4c7" exitCode=0 Oct 07 00:14:26 crc kubenswrapper[4791]: I1007 00:14:26.112382 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nlxvp" event={"ID":"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c","Type":"ContainerDied","Data":"b4adb8d183a969d47a8388081e3fa470ebefc3bda1c3e08c418497d60fe0a4c7"} Oct 07 00:14:26 crc kubenswrapper[4791]: I1007 00:14:26.112504 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nlxvp" event={"ID":"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c","Type":"ContainerStarted","Data":"f8f4e7295038c42bafc49f78680dee4cf77ad0a2114a26bdb30997943d3976d3"} Oct 07 00:14:27 crc kubenswrapper[4791]: I1007 00:14:27.118255 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nlxvp" event={"ID":"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c","Type":"ContainerStarted","Data":"50bdf46533ba5fca843664bfc69b98657ddde9eced3ef7071708f4d0eef580b1"} Oct 07 00:14:27 crc kubenswrapper[4791]: I1007 00:14:27.122258 4791 generic.go:334] "Generic (PLEG): container finished" podID="034480a5-7c4f-48ee-86ad-d358e746e74b" containerID="9e9fa16dab02ab4ebb63d6788069784ee3290119aad9d2dbe14f719d7aed72da" exitCode=0 Oct 07 00:14:27 crc kubenswrapper[4791]: I1007 00:14:27.122323 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-klxwq" event={"ID":"034480a5-7c4f-48ee-86ad-d358e746e74b","Type":"ContainerDied","Data":"9e9fa16dab02ab4ebb63d6788069784ee3290119aad9d2dbe14f719d7aed72da"} Oct 07 00:14:28 crc kubenswrapper[4791]: I1007 00:14:28.129110 4791 generic.go:334] "Generic (PLEG): container finished" podID="4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c" containerID="50bdf46533ba5fca843664bfc69b98657ddde9eced3ef7071708f4d0eef580b1" exitCode=0 Oct 07 00:14:28 crc kubenswrapper[4791]: I1007 00:14:28.129363 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nlxvp" event={"ID":"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c","Type":"ContainerDied","Data":"50bdf46533ba5fca843664bfc69b98657ddde9eced3ef7071708f4d0eef580b1"} Oct 07 00:14:29 crc kubenswrapper[4791]: I1007 00:14:29.140931 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-klxwq" event={"ID":"034480a5-7c4f-48ee-86ad-d358e746e74b","Type":"ContainerStarted","Data":"2ea3764d76e7b7cb6634299cc502c247689d3553fc3d6e4f811cc0b592158b8a"} Oct 07 00:14:29 crc kubenswrapper[4791]: I1007 00:14:29.143237 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nlxvp" 
event={"ID":"4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c","Type":"ContainerStarted","Data":"4216438039a43fdedccff2ef0cd226d2fa88168d1a6f793eb188f1e97899298a"} Oct 07 00:14:29 crc kubenswrapper[4791]: I1007 00:14:29.165805 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-klxwq" podStartSLOduration=3.650433355 podStartE2EDuration="6.165780551s" podCreationTimestamp="2025-10-07 00:14:23 +0000 UTC" firstStartedPulling="2025-10-07 00:14:25.065497731 +0000 UTC m=+191.661435382" lastFinishedPulling="2025-10-07 00:14:27.580844927 +0000 UTC m=+194.176782578" observedRunningTime="2025-10-07 00:14:29.161833092 +0000 UTC m=+195.757770753" watchObservedRunningTime="2025-10-07 00:14:29.165780551 +0000 UTC m=+195.761718202" Oct 07 00:14:29 crc kubenswrapper[4791]: I1007 00:14:29.179361 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nlxvp" podStartSLOduration=2.734741234 podStartE2EDuration="5.179332161s" podCreationTimestamp="2025-10-07 00:14:24 +0000 UTC" firstStartedPulling="2025-10-07 00:14:26.114127303 +0000 UTC m=+192.710064944" lastFinishedPulling="2025-10-07 00:14:28.55871822 +0000 UTC m=+195.154655871" observedRunningTime="2025-10-07 00:14:29.17767157 +0000 UTC m=+195.773609221" watchObservedRunningTime="2025-10-07 00:14:29.179332161 +0000 UTC m=+195.775269812" Oct 07 00:14:31 crc kubenswrapper[4791]: I1007 00:14:31.884831 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:31 crc kubenswrapper[4791]: I1007 00:14:31.885252 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:31 crc kubenswrapper[4791]: I1007 00:14:31.928198 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:32 crc kubenswrapper[4791]: I1007 00:14:32.207279 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:14:32 crc kubenswrapper[4791]: I1007 00:14:32.533234 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:32 crc kubenswrapper[4791]: I1007 00:14:32.533295 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:32 crc kubenswrapper[4791]: I1007 00:14:32.581874 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:33 crc kubenswrapper[4791]: I1007 00:14:33.220609 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m6x2v" Oct 07 00:14:34 crc kubenswrapper[4791]: I1007 00:14:34.295830 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:34 crc kubenswrapper[4791]: I1007 00:14:34.295908 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:34 crc kubenswrapper[4791]: I1007 00:14:34.343808 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:34 crc kubenswrapper[4791]: I1007 00:14:34.898609 
4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:34 crc kubenswrapper[4791]: I1007 00:14:34.899175 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:34 crc kubenswrapper[4791]: I1007 00:14:34.944127 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:35 crc kubenswrapper[4791]: I1007 00:14:35.247588 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nlxvp" Oct 07 00:14:35 crc kubenswrapper[4791]: I1007 00:14:35.267427 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-klxwq" Oct 07 00:14:41 crc kubenswrapper[4791]: I1007 00:14:41.600863 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:14:41 crc kubenswrapper[4791]: I1007 00:14:41.601493 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:14:41 crc kubenswrapper[4791]: I1007 00:14:41.601544 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:14:41 crc kubenswrapper[4791]: I1007 00:14:41.602097 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b"} pod="openshift-machine-config-operator/machine-config-daemon-h728c" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 00:14:41 crc kubenswrapper[4791]: I1007 00:14:41.602157 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" containerID="cri-o://b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b" gracePeriod=600 Oct 07 00:14:42 crc kubenswrapper[4791]: I1007 00:14:42.232274 4791 generic.go:334] "Generic (PLEG): container finished" podID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerID="b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b" exitCode=0 Oct 07 00:14:42 crc kubenswrapper[4791]: I1007 00:14:42.232366 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerDied","Data":"b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b"} Oct 07 00:14:42 crc kubenswrapper[4791]: I1007 00:14:42.232707 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" 
event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"88997fcb013242215faf11bfd8064aa2df1ecd9fcdd1623ba0f00e9805ada21b"} Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.136855 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt"] Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.138437 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.140460 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.143807 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.151923 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt"] Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.203668 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz9qb\" (UniqueName: \"kubernetes.io/projected/0d90400b-7efa-4d73-9c25-0439c8ed6279-kube-api-access-zz9qb\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.203724 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d90400b-7efa-4d73-9c25-0439c8ed6279-secret-volume\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.203924 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d90400b-7efa-4d73-9c25-0439c8ed6279-config-volume\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.305181 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz9qb\" (UniqueName: \"kubernetes.io/projected/0d90400b-7efa-4d73-9c25-0439c8ed6279-kube-api-access-zz9qb\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.305264 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d90400b-7efa-4d73-9c25-0439c8ed6279-secret-volume\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.305319 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/0d90400b-7efa-4d73-9c25-0439c8ed6279-config-volume\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.306369 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d90400b-7efa-4d73-9c25-0439c8ed6279-config-volume\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.322877 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d90400b-7efa-4d73-9c25-0439c8ed6279-secret-volume\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.334601 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz9qb\" (UniqueName: \"kubernetes.io/projected/0d90400b-7efa-4d73-9c25-0439c8ed6279-kube-api-access-zz9qb\") pod \"collect-profiles-29329935-96zwt\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.512977 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:00 crc kubenswrapper[4791]: I1007 00:15:00.960380 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt"] Oct 07 00:15:00 crc kubenswrapper[4791]: W1007 00:15:00.969014 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d90400b_7efa_4d73_9c25_0439c8ed6279.slice/crio-639c3960ef990c6779fd1e1ec18db6152918496a498c9b3c76e660fb6edebf9b WatchSource:0}: Error finding container 639c3960ef990c6779fd1e1ec18db6152918496a498c9b3c76e660fb6edebf9b: Status 404 returned error can't find the container with id 639c3960ef990c6779fd1e1ec18db6152918496a498c9b3c76e660fb6edebf9b Oct 07 00:15:01 crc kubenswrapper[4791]: I1007 00:15:01.345784 4791 generic.go:334] "Generic (PLEG): container finished" podID="0d90400b-7efa-4d73-9c25-0439c8ed6279" containerID="bf630cbf45a38d297165763c6b8323be9e16c0d4e9f3d0d08157de29c047eca4" exitCode=0 Oct 07 00:15:01 crc kubenswrapper[4791]: I1007 00:15:01.346053 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" event={"ID":"0d90400b-7efa-4d73-9c25-0439c8ed6279","Type":"ContainerDied","Data":"bf630cbf45a38d297165763c6b8323be9e16c0d4e9f3d0d08157de29c047eca4"} Oct 07 00:15:01 crc kubenswrapper[4791]: I1007 00:15:01.346716 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" event={"ID":"0d90400b-7efa-4d73-9c25-0439c8ed6279","Type":"ContainerStarted","Data":"639c3960ef990c6779fd1e1ec18db6152918496a498c9b3c76e660fb6edebf9b"} Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.627017 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.742212 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz9qb\" (UniqueName: \"kubernetes.io/projected/0d90400b-7efa-4d73-9c25-0439c8ed6279-kube-api-access-zz9qb\") pod \"0d90400b-7efa-4d73-9c25-0439c8ed6279\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.742392 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d90400b-7efa-4d73-9c25-0439c8ed6279-config-volume\") pod \"0d90400b-7efa-4d73-9c25-0439c8ed6279\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.742461 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d90400b-7efa-4d73-9c25-0439c8ed6279-secret-volume\") pod \"0d90400b-7efa-4d73-9c25-0439c8ed6279\" (UID: \"0d90400b-7efa-4d73-9c25-0439c8ed6279\") " Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.742897 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d90400b-7efa-4d73-9c25-0439c8ed6279-config-volume" (OuterVolumeSpecName: "config-volume") pod "0d90400b-7efa-4d73-9c25-0439c8ed6279" (UID: "0d90400b-7efa-4d73-9c25-0439c8ed6279"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.748813 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d90400b-7efa-4d73-9c25-0439c8ed6279-kube-api-access-zz9qb" (OuterVolumeSpecName: "kube-api-access-zz9qb") pod "0d90400b-7efa-4d73-9c25-0439c8ed6279" (UID: "0d90400b-7efa-4d73-9c25-0439c8ed6279"). InnerVolumeSpecName "kube-api-access-zz9qb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.749271 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d90400b-7efa-4d73-9c25-0439c8ed6279-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0d90400b-7efa-4d73-9c25-0439c8ed6279" (UID: "0d90400b-7efa-4d73-9c25-0439c8ed6279"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.844126 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz9qb\" (UniqueName: \"kubernetes.io/projected/0d90400b-7efa-4d73-9c25-0439c8ed6279-kube-api-access-zz9qb\") on node \"crc\" DevicePath \"\"" Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.844161 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d90400b-7efa-4d73-9c25-0439c8ed6279-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 00:15:02 crc kubenswrapper[4791]: I1007 00:15:02.844171 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d90400b-7efa-4d73-9c25-0439c8ed6279-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 00:15:03 crc kubenswrapper[4791]: I1007 00:15:03.360085 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" event={"ID":"0d90400b-7efa-4d73-9c25-0439c8ed6279","Type":"ContainerDied","Data":"639c3960ef990c6779fd1e1ec18db6152918496a498c9b3c76e660fb6edebf9b"} Oct 07 00:15:03 crc kubenswrapper[4791]: I1007 00:15:03.360138 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="639c3960ef990c6779fd1e1ec18db6152918496a498c9b3c76e660fb6edebf9b" Oct 07 00:15:03 crc kubenswrapper[4791]: I1007 00:15:03.360157 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329935-96zwt" Oct 07 00:16:41 crc kubenswrapper[4791]: I1007 00:16:41.600907 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:16:41 crc kubenswrapper[4791]: I1007 00:16:41.601640 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.036341 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bjmt9"] Oct 07 00:16:59 crc kubenswrapper[4791]: E1007 00:16:59.037171 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d90400b-7efa-4d73-9c25-0439c8ed6279" containerName="collect-profiles" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.037184 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d90400b-7efa-4d73-9c25-0439c8ed6279" containerName="collect-profiles" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.037283 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d90400b-7efa-4d73-9c25-0439c8ed6279" containerName="collect-profiles" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.037701 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.064877 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bjmt9"] Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.103893 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.104148 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-trusted-ca\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.104277 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.104384 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-registry-tls\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.104503 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-bound-sa-token\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.104619 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.104806 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-registry-certificates\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.104912 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mq8j\" (UniqueName: 
\"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-kube-api-access-5mq8j\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.129860 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.206197 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.206249 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-trusted-ca\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.206276 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.206298 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-registry-tls\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.206324 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-bound-sa-token\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.206351 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-registry-certificates\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.206374 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mq8j\" (UniqueName: \"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-kube-api-access-5mq8j\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.208151 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-registry-certificates\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.208178 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-trusted-ca\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.208565 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.221146 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.221830 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-registry-tls\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.228796 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mq8j\" (UniqueName: \"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-kube-api-access-5mq8j\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.237486 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86d6e5b2-dee0-494f-b3f8-77222a3d9db1-bound-sa-token\") pod \"image-registry-66df7c8f76-bjmt9\" (UID: \"86d6e5b2-dee0-494f-b3f8-77222a3d9db1\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.354177 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:16:59 crc kubenswrapper[4791]: I1007 00:16:59.573192 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bjmt9"] Oct 07 00:17:00 crc kubenswrapper[4791]: I1007 00:17:00.051012 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" event={"ID":"86d6e5b2-dee0-494f-b3f8-77222a3d9db1","Type":"ContainerStarted","Data":"596eb6957d1560a6ca1630476bc3247da5718313e1d84f3ebcc9d1c4ce2d2d67"} Oct 07 00:17:00 crc kubenswrapper[4791]: I1007 00:17:00.051098 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" event={"ID":"86d6e5b2-dee0-494f-b3f8-77222a3d9db1","Type":"ContainerStarted","Data":"7a6efa4d9aa6faebf6c3e3063a30d04a4a2e83774955e726f2767662d1b82d27"} Oct 07 00:17:00 crc kubenswrapper[4791]: I1007 00:17:00.051271 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:17:00 crc kubenswrapper[4791]: I1007 00:17:00.075860 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" podStartSLOduration=1.075802666 podStartE2EDuration="1.075802666s" podCreationTimestamp="2025-10-07 00:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:17:00.072075346 +0000 UTC m=+346.668012997" watchObservedRunningTime="2025-10-07 00:17:00.075802666 +0000 UTC m=+346.671740317" Oct 07 00:17:11 crc kubenswrapper[4791]: I1007 00:17:11.600914 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:17:11 crc kubenswrapper[4791]: I1007 00:17:11.601539 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:17:19 crc kubenswrapper[4791]: I1007 00:17:19.359260 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-bjmt9" Oct 07 00:17:19 crc kubenswrapper[4791]: I1007 00:17:19.409996 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpqb4"] Oct 07 00:17:41 crc kubenswrapper[4791]: I1007 00:17:41.600758 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:17:41 crc kubenswrapper[4791]: I1007 00:17:41.601199 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Oct 07 00:17:41 crc kubenswrapper[4791]: I1007 00:17:41.601274 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:17:41 crc kubenswrapper[4791]: I1007 00:17:41.602056 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"88997fcb013242215faf11bfd8064aa2df1ecd9fcdd1623ba0f00e9805ada21b"} pod="openshift-machine-config-operator/machine-config-daemon-h728c" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 00:17:41 crc kubenswrapper[4791]: I1007 00:17:41.602130 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" containerID="cri-o://88997fcb013242215faf11bfd8064aa2df1ecd9fcdd1623ba0f00e9805ada21b" gracePeriod=600 Oct 07 00:17:42 crc kubenswrapper[4791]: I1007 00:17:42.315422 4791 generic.go:334] "Generic (PLEG): container finished" podID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerID="88997fcb013242215faf11bfd8064aa2df1ecd9fcdd1623ba0f00e9805ada21b" exitCode=0 Oct 07 00:17:42 crc kubenswrapper[4791]: I1007 00:17:42.315533 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerDied","Data":"88997fcb013242215faf11bfd8064aa2df1ecd9fcdd1623ba0f00e9805ada21b"} Oct 07 00:17:42 crc kubenswrapper[4791]: I1007 00:17:42.315940 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"31ce44ec022d50902d76172a803393157894bbd037a976f1807bc4f95c7c05c8"} Oct 07 00:17:42 crc kubenswrapper[4791]: I1007 00:17:42.315970 4791 scope.go:117] "RemoveContainer" containerID="b844417ca61456d702cd7d399400b9436b039a8008cf59516b9618fde1b1aa0b" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.449847 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" podUID="05472df6-c385-4574-ba3c-844fe282b74b" containerName="registry" containerID="cri-o://1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46" gracePeriod=30 Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.800901 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.948583 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-trusted-ca\") pod \"05472df6-c385-4574-ba3c-844fe282b74b\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.948866 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-bound-sa-token\") pod \"05472df6-c385-4574-ba3c-844fe282b74b\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.948910 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9z7m\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-kube-api-access-h9z7m\") pod \"05472df6-c385-4574-ba3c-844fe282b74b\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.948931 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05472df6-c385-4574-ba3c-844fe282b74b-ca-trust-extracted\") pod \"05472df6-c385-4574-ba3c-844fe282b74b\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.949010 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-registry-tls\") pod \"05472df6-c385-4574-ba3c-844fe282b74b\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.949030 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-registry-certificates\") pod \"05472df6-c385-4574-ba3c-844fe282b74b\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.949148 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"05472df6-c385-4574-ba3c-844fe282b74b\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.949177 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05472df6-c385-4574-ba3c-844fe282b74b-installation-pull-secrets\") pod \"05472df6-c385-4574-ba3c-844fe282b74b\" (UID: \"05472df6-c385-4574-ba3c-844fe282b74b\") " Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.949662 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "05472df6-c385-4574-ba3c-844fe282b74b" (UID: "05472df6-c385-4574-ba3c-844fe282b74b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.950739 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "05472df6-c385-4574-ba3c-844fe282b74b" (UID: "05472df6-c385-4574-ba3c-844fe282b74b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.955183 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-kube-api-access-h9z7m" (OuterVolumeSpecName: "kube-api-access-h9z7m") pod "05472df6-c385-4574-ba3c-844fe282b74b" (UID: "05472df6-c385-4574-ba3c-844fe282b74b"). InnerVolumeSpecName "kube-api-access-h9z7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.956913 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05472df6-c385-4574-ba3c-844fe282b74b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "05472df6-c385-4574-ba3c-844fe282b74b" (UID: "05472df6-c385-4574-ba3c-844fe282b74b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.957190 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "05472df6-c385-4574-ba3c-844fe282b74b" (UID: "05472df6-c385-4574-ba3c-844fe282b74b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.960691 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "05472df6-c385-4574-ba3c-844fe282b74b" (UID: "05472df6-c385-4574-ba3c-844fe282b74b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.964644 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "05472df6-c385-4574-ba3c-844fe282b74b" (UID: "05472df6-c385-4574-ba3c-844fe282b74b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 07 00:17:44 crc kubenswrapper[4791]: I1007 00:17:44.971330 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05472df6-c385-4574-ba3c-844fe282b74b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "05472df6-c385-4574-ba3c-844fe282b74b" (UID: "05472df6-c385-4574-ba3c-844fe282b74b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.050188 4791 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.050224 4791 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.050239 4791 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05472df6-c385-4574-ba3c-844fe282b74b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.050248 4791 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05472df6-c385-4574-ba3c-844fe282b74b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.050259 4791 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.050268 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9z7m\" (UniqueName: \"kubernetes.io/projected/05472df6-c385-4574-ba3c-844fe282b74b-kube-api-access-h9z7m\") on node \"crc\" DevicePath \"\"" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.050276 4791 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05472df6-c385-4574-ba3c-844fe282b74b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.339742 4791 generic.go:334] "Generic (PLEG): container finished" podID="05472df6-c385-4574-ba3c-844fe282b74b" containerID="1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46" exitCode=0 Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.339815 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.339835 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" event={"ID":"05472df6-c385-4574-ba3c-844fe282b74b","Type":"ContainerDied","Data":"1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46"} Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.340215 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zpqb4" event={"ID":"05472df6-c385-4574-ba3c-844fe282b74b","Type":"ContainerDied","Data":"d90aade3db653d03c82e0960164fe0b4429f9a0a6f10d24d34b8af27c3cd3670"} Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.340235 4791 scope.go:117] "RemoveContainer" containerID="1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.358529 4791 scope.go:117] "RemoveContainer" containerID="1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46" Oct 07 00:17:45 crc kubenswrapper[4791]: E1007 00:17:45.359103 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46\": container with ID starting with 1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46 not found: ID does not exist" containerID="1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.359135 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46"} err="failed to get container status \"1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46\": rpc error: code = NotFound desc = could not find container \"1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46\": container with ID starting with 1f48e4d76e1999112afece85e4e559b4232b40293a8dfc5cb83a89ed8dd50d46 not found: ID does not exist" Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.375366 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpqb4"] Oct 07 00:17:45 crc kubenswrapper[4791]: I1007 00:17:45.380786 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpqb4"] Oct 07 00:17:46 crc kubenswrapper[4791]: I1007 00:17:46.076252 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05472df6-c385-4574-ba3c-844fe282b74b" path="/var/lib/kubelet/pods/05472df6-c385-4574-ba3c-844fe282b74b/volumes" Oct 07 00:19:14 crc kubenswrapper[4791]: I1007 00:19:14.185892 4791 scope.go:117] "RemoveContainer" containerID="ba745c5c3cbf03040a5667e3ba681c9fa91a1db9bc39bb183444bb607f6a45b3" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.517487 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-n6cgf"] Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.520240 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovn-controller" containerID="cri-o://866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b" gracePeriod=30 Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 
00:19:36.520413 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f" gracePeriod=30 Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.520527 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovn-acl-logging" containerID="cri-o://7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f" gracePeriod=30 Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.520606 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="northd" containerID="cri-o://30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79" gracePeriod=30 Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.520617 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="sbdb" containerID="cri-o://4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd" gracePeriod=30 Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.520616 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kube-rbac-proxy-node" containerID="cri-o://b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd" gracePeriod=30 Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.520335 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="nbdb" containerID="cri-o://04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f" gracePeriod=30 Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.550938 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" containerID="cri-o://397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a" gracePeriod=30 Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.862938 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/2.log" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.866151 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovn-acl-logging/0.log" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.866693 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovn-controller/0.log" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.867196 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.933386 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-t6hnw"] Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.933698 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05472df6-c385-4574-ba3c-844fe282b74b" containerName="registry" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.933726 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="05472df6-c385-4574-ba3c-844fe282b74b" containerName="registry" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.933761 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovn-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.933770 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovn-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.933795 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovn-acl-logging" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.933804 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovn-acl-logging" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.933815 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kube-rbac-proxy-node" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.933824 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kube-rbac-proxy-node" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.933835 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="nbdb" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.933842 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="nbdb" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.933924 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.933936 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.933948 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="sbdb" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.933959 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="sbdb" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.933971 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934042 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.934053 4791 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934061 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.934071 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kubecfg-setup" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934082 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kubecfg-setup" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.934094 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934102 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.934113 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="northd" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934121 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="northd" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934266 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934282 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kube-rbac-proxy-node" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934297 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovn-acl-logging" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934306 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovn-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934314 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934323 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934333 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="sbdb" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934344 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934353 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="nbdb" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934362 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="05472df6-c385-4574-ba3c-844fe282b74b" containerName="registry" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934373 4791 
memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="northd" Oct 07 00:19:36 crc kubenswrapper[4791]: E1007 00:19:36.934520 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934531 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.934635 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerName="ovnkube-controller" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.936910 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995647 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-node-log\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995705 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-systemd\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995745 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-script-lib\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995766 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-env-overrides\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995786 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-config\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995810 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-kubelet\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995856 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-ovn\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995880 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-bin\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995898 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-slash\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995917 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-systemd-units\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995945 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grsfn\" (UniqueName: \"kubernetes.io/projected/47547f34-4a66-4d60-8d38-af69eb320b1d-kube-api-access-grsfn\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.995987 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-ovn-kubernetes\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996038 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-var-lib-openvswitch\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996059 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-netns\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996078 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996097 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-netd\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996125 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-etc-openvswitch\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996147 4791 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-openvswitch\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996169 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47547f34-4a66-4d60-8d38-af69eb320b1d-ovn-node-metrics-cert\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996185 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-log-socket\") pod \"47547f34-4a66-4d60-8d38-af69eb320b1d\" (UID: \"47547f34-4a66-4d60-8d38-af69eb320b1d\") " Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996389 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-log-socket" (OuterVolumeSpecName: "log-socket") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996492 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996585 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-node-log" (OuterVolumeSpecName: "node-log") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996685 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996738 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996767 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996815 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996834 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996861 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996851 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996903 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-slash" (OuterVolumeSpecName: "host-slash") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996921 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996909 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.996929 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.997518 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.997555 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:19:36 crc kubenswrapper[4791]: I1007 00:19:36.997661 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.002270 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47547f34-4a66-4d60-8d38-af69eb320b1d-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.002322 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47547f34-4a66-4d60-8d38-af69eb320b1d-kube-api-access-grsfn" (OuterVolumeSpecName: "kube-api-access-grsfn") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "kube-api-access-grsfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.009484 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "47547f34-4a66-4d60-8d38-af69eb320b1d" (UID: "47547f34-4a66-4d60-8d38-af69eb320b1d"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.036352 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xbjfx_8a389028-af4a-4b2c-a638-04eac9238628/kube-multus/1.log" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.037260 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xbjfx_8a389028-af4a-4b2c-a638-04eac9238628/kube-multus/0.log" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.037371 4791 generic.go:334] "Generic (PLEG): container finished" podID="8a389028-af4a-4b2c-a638-04eac9238628" containerID="102807f499ff8337104e02abb4aabfcac759e06177c81422948fda65e540df1b" exitCode=2 Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.037476 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xbjfx" event={"ID":"8a389028-af4a-4b2c-a638-04eac9238628","Type":"ContainerDied","Data":"102807f499ff8337104e02abb4aabfcac759e06177c81422948fda65e540df1b"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.037545 4791 scope.go:117] "RemoveContainer" containerID="faba622e4f20c07c3d36a8572f27a5e6027536dc63dfc159aac5e3ec8efe2053" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.038032 4791 scope.go:117] "RemoveContainer" containerID="102807f499ff8337104e02abb4aabfcac759e06177c81422948fda65e540df1b" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.038213 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-xbjfx_openshift-multus(8a389028-af4a-4b2c-a638-04eac9238628)\"" pod="openshift-multus/multus-xbjfx" podUID="8a389028-af4a-4b2c-a638-04eac9238628" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.042866 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovnkube-controller/2.log" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.044654 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovn-acl-logging/0.log" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045059 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-n6cgf_47547f34-4a66-4d60-8d38-af69eb320b1d/ovn-controller/0.log" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045479 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a" exitCode=0 Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045505 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd" exitCode=0 Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045512 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f" exitCode=0 Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045520 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79" exitCode=0 Oct 07 00:19:37 crc 
kubenswrapper[4791]: I1007 00:19:37.045527 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f" exitCode=0 Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045534 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd" exitCode=0 Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045541 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f" exitCode=143 Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045548 4791 generic.go:334] "Generic (PLEG): container finished" podID="47547f34-4a66-4d60-8d38-af69eb320b1d" containerID="866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b" exitCode=143 Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045569 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045598 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045610 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045619 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045628 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045637 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045648 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045659 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045664 4791 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045669 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045674 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045679 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045683 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045688 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045693 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045698 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045705 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045713 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045718 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045723 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045728 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045734 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045740 4791 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045745 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045751 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045756 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045762 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045769 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045778 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045784 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045791 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045796 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045801 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045806 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045813 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045817 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045823 4791 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045827 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045835 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" event={"ID":"47547f34-4a66-4d60-8d38-af69eb320b1d","Type":"ContainerDied","Data":"9735fa07b69c4bac03dad134cd5beee003b25a4f931f7edd798137c289e11519"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045842 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045850 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045855 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045861 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045866 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045871 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045876 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045881 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045886 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045891 4791 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.045992 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-n6cgf" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.067667 4791 scope.go:117] "RemoveContainer" containerID="397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.087632 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.099982 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovn-node-metrics-cert\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100053 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-run-ovn-kubernetes\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100079 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-systemd\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100100 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovnkube-config\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100119 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-cni-netd\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100136 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcz9v\" (UniqueName: \"kubernetes.io/projected/8dda0fdc-b48e-412a-87f6-07ffde936b2f-kube-api-access-gcz9v\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100168 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-node-log\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100190 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-systemd-units\") pod 
\"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100206 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100226 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-run-netns\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100243 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-ovn\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100272 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-env-overrides\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100293 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100314 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-etc-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100332 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-var-lib-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100353 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-kubelet\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100373 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-cni-bin\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100396 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-log-socket\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100440 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-slash\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100463 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovnkube-script-lib\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100515 4791 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100525 4791 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100534 4791 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47547f34-4a66-4d60-8d38-af69eb320b1d-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100543 4791 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-log-socket\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100550 4791 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-node-log\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100558 4791 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100567 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100576 4791 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-env-overrides\") on 
node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100587 4791 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47547f34-4a66-4d60-8d38-af69eb320b1d-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100595 4791 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100603 4791 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100611 4791 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100622 4791 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-slash\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100629 4791 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100638 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grsfn\" (UniqueName: \"kubernetes.io/projected/47547f34-4a66-4d60-8d38-af69eb320b1d-kube-api-access-grsfn\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100647 4791 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100655 4791 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100663 4791 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100674 4791 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.100682 4791 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47547f34-4a66-4d60-8d38-af69eb320b1d-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.105267 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-n6cgf"] Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.109246 4791 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-n6cgf"] Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.112189 4791 scope.go:117] "RemoveContainer" containerID="4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.127352 4791 scope.go:117] "RemoveContainer" containerID="04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.140584 4791 scope.go:117] "RemoveContainer" containerID="30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.155802 4791 scope.go:117] "RemoveContainer" containerID="25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.171673 4791 scope.go:117] "RemoveContainer" containerID="b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.189987 4791 scope.go:117] "RemoveContainer" containerID="7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202248 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-etc-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202284 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-var-lib-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202313 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-kubelet\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202339 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-cni-bin\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202364 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-log-socket\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202379 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-etc-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202395 4791 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-slash\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202438 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-slash\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202450 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-cni-bin\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202492 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-log-socket\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202463 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovnkube-script-lib\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202511 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-kubelet\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202531 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovn-node-metrics-cert\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202586 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-var-lib-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202611 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-run-ovn-kubernetes\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202698 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-run-ovn-kubernetes\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202780 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-systemd\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202826 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovnkube-config\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202835 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-systemd\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202848 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-cni-netd\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202869 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcz9v\" (UniqueName: \"kubernetes.io/projected/8dda0fdc-b48e-412a-87f6-07ffde936b2f-kube-api-access-gcz9v\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202887 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-node-log\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202911 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-systemd-units\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202937 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202967 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-run-netns\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.202986 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-ovn\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203002 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-env-overrides\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203030 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203081 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-openvswitch\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203251 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-systemd-units\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203283 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203301 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-cni-netd\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203335 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-run-ovn\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203358 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-node-log\") pod \"ovnkube-node-t6hnw\" (UID: 
\"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203375 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dda0fdc-b48e-412a-87f6-07ffde936b2f-host-run-netns\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203523 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovnkube-script-lib\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.203960 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-env-overrides\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.204018 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovnkube-config\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.206718 4791 scope.go:117] "RemoveContainer" containerID="866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.207996 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8dda0fdc-b48e-412a-87f6-07ffde936b2f-ovn-node-metrics-cert\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.220929 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcz9v\" (UniqueName: \"kubernetes.io/projected/8dda0fdc-b48e-412a-87f6-07ffde936b2f-kube-api-access-gcz9v\") pod \"ovnkube-node-t6hnw\" (UID: \"8dda0fdc-b48e-412a-87f6-07ffde936b2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.222002 4791 scope.go:117] "RemoveContainer" containerID="a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.237559 4791 scope.go:117] "RemoveContainer" containerID="397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.238053 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": container with ID starting with 397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a not found: ID does not exist" containerID="397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.238116 4791 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} err="failed to get container status \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": rpc error: code = NotFound desc = could not find container \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": container with ID starting with 397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.238168 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.238714 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": container with ID starting with 296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3 not found: ID does not exist" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.238802 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} err="failed to get container status \"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": rpc error: code = NotFound desc = could not find container \"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": container with ID starting with 296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3 not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.238868 4791 scope.go:117] "RemoveContainer" containerID="4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.239430 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": container with ID starting with 4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd not found: ID does not exist" containerID="4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.239520 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} err="failed to get container status \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": rpc error: code = NotFound desc = could not find container \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": container with ID starting with 4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.239543 4791 scope.go:117] "RemoveContainer" containerID="04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.239878 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": container with ID starting with 04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f not found: ID does not exist" 
containerID="04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.239921 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} err="failed to get container status \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": rpc error: code = NotFound desc = could not find container \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": container with ID starting with 04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.239948 4791 scope.go:117] "RemoveContainer" containerID="30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.240274 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": container with ID starting with 30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79 not found: ID does not exist" containerID="30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.240306 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} err="failed to get container status \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": rpc error: code = NotFound desc = could not find container \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": container with ID starting with 30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79 not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.240327 4791 scope.go:117] "RemoveContainer" containerID="25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.240587 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": container with ID starting with 25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f not found: ID does not exist" containerID="25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.240610 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} err="failed to get container status \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": rpc error: code = NotFound desc = could not find container \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": container with ID starting with 25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.240630 4791 scope.go:117] "RemoveContainer" containerID="b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.240949 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": container with ID starting with b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd not found: ID does not exist" containerID="b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.240982 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} err="failed to get container status \"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": rpc error: code = NotFound desc = could not find container \"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": container with ID starting with b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.241000 4791 scope.go:117] "RemoveContainer" containerID="7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.241301 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": container with ID starting with 7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f not found: ID does not exist" containerID="7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.241353 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} err="failed to get container status \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": rpc error: code = NotFound desc = could not find container \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": container with ID starting with 7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.241389 4791 scope.go:117] "RemoveContainer" containerID="866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b" Oct 07 00:19:37 crc kubenswrapper[4791]: E1007 00:19:37.241865 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": container with ID starting with 866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b not found: ID does not exist" containerID="866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.241898 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} err="failed to get container status \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": rpc error: code = NotFound desc = could not find container \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": container with ID starting with 866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.241915 4791 scope.go:117] "RemoveContainer" containerID="a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e" Oct 07 00:19:37 crc 
kubenswrapper[4791]: E1007 00:19:37.242202 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": container with ID starting with a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e not found: ID does not exist" containerID="a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.242257 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} err="failed to get container status \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": rpc error: code = NotFound desc = could not find container \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": container with ID starting with a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.242300 4791 scope.go:117] "RemoveContainer" containerID="397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.242624 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} err="failed to get container status \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": rpc error: code = NotFound desc = could not find container \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": container with ID starting with 397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.242647 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.243102 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} err="failed to get container status \"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": rpc error: code = NotFound desc = could not find container \"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": container with ID starting with 296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3 not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.243130 4791 scope.go:117] "RemoveContainer" containerID="4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.243434 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} err="failed to get container status \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": rpc error: code = NotFound desc = could not find container \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": container with ID starting with 4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.243458 4791 scope.go:117] "RemoveContainer" containerID="04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f" Oct 07 00:19:37 crc 
kubenswrapper[4791]: I1007 00:19:37.243673 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} err="failed to get container status \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": rpc error: code = NotFound desc = could not find container \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": container with ID starting with 04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.243697 4791 scope.go:117] "RemoveContainer" containerID="30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.243961 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} err="failed to get container status \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": rpc error: code = NotFound desc = could not find container \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": container with ID starting with 30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79 not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.243984 4791 scope.go:117] "RemoveContainer" containerID="25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.244259 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} err="failed to get container status \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": rpc error: code = NotFound desc = could not find container \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": container with ID starting with 25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.244292 4791 scope.go:117] "RemoveContainer" containerID="b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.244588 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} err="failed to get container status \"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": rpc error: code = NotFound desc = could not find container \"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": container with ID starting with b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.244616 4791 scope.go:117] "RemoveContainer" containerID="7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.244895 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} err="failed to get container status \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": rpc error: code = NotFound desc = could not find container \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": container with ID 
starting with 7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.244919 4791 scope.go:117] "RemoveContainer" containerID="866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.245345 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} err="failed to get container status \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": rpc error: code = NotFound desc = could not find container \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": container with ID starting with 866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.245366 4791 scope.go:117] "RemoveContainer" containerID="a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.245653 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} err="failed to get container status \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": rpc error: code = NotFound desc = could not find container \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": container with ID starting with a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.245675 4791 scope.go:117] "RemoveContainer" containerID="397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.245959 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} err="failed to get container status \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": rpc error: code = NotFound desc = could not find container \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": container with ID starting with 397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.245980 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.246391 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} err="failed to get container status \"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": rpc error: code = NotFound desc = could not find container \"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": container with ID starting with 296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3 not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.246430 4791 scope.go:117] "RemoveContainer" containerID="4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.246688 4791 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} err="failed to get container status \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": rpc error: code = NotFound desc = could not find container \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": container with ID starting with 4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.246711 4791 scope.go:117] "RemoveContainer" containerID="04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.247092 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} err="failed to get container status \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": rpc error: code = NotFound desc = could not find container \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": container with ID starting with 04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.247122 4791 scope.go:117] "RemoveContainer" containerID="30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.247386 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} err="failed to get container status \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": rpc error: code = NotFound desc = could not find container \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": container with ID starting with 30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79 not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.247428 4791 scope.go:117] "RemoveContainer" containerID="25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.247692 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} err="failed to get container status \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": rpc error: code = NotFound desc = could not find container \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": container with ID starting with 25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.247722 4791 scope.go:117] "RemoveContainer" containerID="b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.247999 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} err="failed to get container status \"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": rpc error: code = NotFound desc = could not find container \"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": container with ID starting with b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd not found: ID does not exist" Oct 
07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.248020 4791 scope.go:117] "RemoveContainer" containerID="7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.248455 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} err="failed to get container status \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": rpc error: code = NotFound desc = could not find container \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": container with ID starting with 7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.248489 4791 scope.go:117] "RemoveContainer" containerID="866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.248763 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} err="failed to get container status \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": rpc error: code = NotFound desc = could not find container \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": container with ID starting with 866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.248785 4791 scope.go:117] "RemoveContainer" containerID="a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.249106 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} err="failed to get container status \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": rpc error: code = NotFound desc = could not find container \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": container with ID starting with a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.249138 4791 scope.go:117] "RemoveContainer" containerID="397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.249431 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a"} err="failed to get container status \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": rpc error: code = NotFound desc = could not find container \"397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a\": container with ID starting with 397ae1882389dd2e53f6928d772adecf2814ae69d9f439e545e8893378940b4a not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.249455 4791 scope.go:117] "RemoveContainer" containerID="296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.249724 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3"} err="failed to get container status 
\"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": rpc error: code = NotFound desc = could not find container \"296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3\": container with ID starting with 296019d53d1d185d7e46bd541116eb47f8b0faf629e7ca18edf6851e1ffccda3 not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.249754 4791 scope.go:117] "RemoveContainer" containerID="4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.250070 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd"} err="failed to get container status \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": rpc error: code = NotFound desc = could not find container \"4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd\": container with ID starting with 4530f1ca2d4ffc673ee95226f4649b4cfd6238240c65cd4bfb51d4706c8e2ebd not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.250093 4791 scope.go:117] "RemoveContainer" containerID="04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.250478 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f"} err="failed to get container status \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": rpc error: code = NotFound desc = could not find container \"04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f\": container with ID starting with 04dd09f221fba68c492e75603cd6972ecc56c69d5ae79eca709c2fe2e26eb80f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.250811 4791 scope.go:117] "RemoveContainer" containerID="30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.251165 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79"} err="failed to get container status \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": rpc error: code = NotFound desc = could not find container \"30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79\": container with ID starting with 30a3e76ba7b6350eff9a05738af0034910255965d89092e651039778d6e1fe79 not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.251191 4791 scope.go:117] "RemoveContainer" containerID="25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.251395 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f"} err="failed to get container status \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": rpc error: code = NotFound desc = could not find container \"25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f\": container with ID starting with 25fcfeac8ab8d40d100a7ab2343ed935a063b66f72960188e0a3c45309779b7f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.251439 4791 scope.go:117] "RemoveContainer" 
containerID="b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.251684 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd"} err="failed to get container status \"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": rpc error: code = NotFound desc = could not find container \"b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd\": container with ID starting with b0a1dc4a344a88be5a40dc2105cf7e881836151b399139d792917453f50291fd not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.251715 4791 scope.go:117] "RemoveContainer" containerID="7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.251968 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f"} err="failed to get container status \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": rpc error: code = NotFound desc = could not find container \"7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f\": container with ID starting with 7b148065bc558b0317aa810519c8401b7b0ac0ec68136cc672672060ef6aa37f not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.251988 4791 scope.go:117] "RemoveContainer" containerID="866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.252352 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b"} err="failed to get container status \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": rpc error: code = NotFound desc = could not find container \"866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b\": container with ID starting with 866c5b6c4f62c6dfc3419cdedf0073d1d040fdc5f018b80e3649601f9ace827b not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.252370 4791 scope.go:117] "RemoveContainer" containerID="a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.252675 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e"} err="failed to get container status \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": rpc error: code = NotFound desc = could not find container \"a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e\": container with ID starting with a70d1a70ff31985be99d2acce4d0f4849b1d8c08939c19379979168c6939f69e not found: ID does not exist" Oct 07 00:19:37 crc kubenswrapper[4791]: I1007 00:19:37.254087 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:38 crc kubenswrapper[4791]: I1007 00:19:38.056262 4791 generic.go:334] "Generic (PLEG): container finished" podID="8dda0fdc-b48e-412a-87f6-07ffde936b2f" containerID="196192cdfaf8a7084c73b547b7064d627133635f1c567e767bdfcdfa2fd62bef" exitCode=0 Oct 07 00:19:38 crc kubenswrapper[4791]: I1007 00:19:38.056360 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerDied","Data":"196192cdfaf8a7084c73b547b7064d627133635f1c567e767bdfcdfa2fd62bef"} Oct 07 00:19:38 crc kubenswrapper[4791]: I1007 00:19:38.056961 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"bfe26f2406579a074b22d8f5d32073aaa9a67045be6aea8f273d0d29ac8929a1"} Oct 07 00:19:38 crc kubenswrapper[4791]: I1007 00:19:38.059739 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xbjfx_8a389028-af4a-4b2c-a638-04eac9238628/kube-multus/1.log" Oct 07 00:19:38 crc kubenswrapper[4791]: I1007 00:19:38.079005 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47547f34-4a66-4d60-8d38-af69eb320b1d" path="/var/lib/kubelet/pods/47547f34-4a66-4d60-8d38-af69eb320b1d/volumes" Oct 07 00:19:39 crc kubenswrapper[4791]: I1007 00:19:39.072316 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"eb02969ac6a644ae9d49301bd042a3a10564df9488f93e8e7056555b3e2106ea"} Oct 07 00:19:39 crc kubenswrapper[4791]: I1007 00:19:39.072677 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"30d98a75630c9d9649d294e72330e310278b9dd9cf69462c92c879c2e758849e"} Oct 07 00:19:39 crc kubenswrapper[4791]: I1007 00:19:39.072692 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"1a42833ff8a1784f5f2e2275c08d9de9d948266b2c61c88588e5b81da77d95e4"} Oct 07 00:19:39 crc kubenswrapper[4791]: I1007 00:19:39.072701 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"5de0400bb5ae5fe208f5e5c8631e58f21a3fa0889540768620d59b5ea4c783e4"} Oct 07 00:19:39 crc kubenswrapper[4791]: I1007 00:19:39.072720 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"1618e61af9640f17fdbab7f3fb7f72b30ee395adeaf20600669d9f6cf267596b"} Oct 07 00:19:39 crc kubenswrapper[4791]: I1007 00:19:39.072728 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"2d2510fd69a08f70902d644ae817da9869b0c6e9794df5aca93bb334755d14c3"} Oct 07 00:19:41 crc kubenswrapper[4791]: I1007 00:19:41.090881 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" 
event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"6eb6842ba719beb4b91692b6cffeafa60a1c6fa3305d63614ef8a924e094b57e"} Oct 07 00:19:41 crc kubenswrapper[4791]: I1007 00:19:41.601064 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:19:41 crc kubenswrapper[4791]: I1007 00:19:41.601179 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:19:44 crc kubenswrapper[4791]: I1007 00:19:44.120776 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" event={"ID":"8dda0fdc-b48e-412a-87f6-07ffde936b2f","Type":"ContainerStarted","Data":"ff62c6843b0638f59d0a8a53671fe07627efebe3bee88ff8b9a73b8d5ca10f54"} Oct 07 00:19:44 crc kubenswrapper[4791]: I1007 00:19:44.121823 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:44 crc kubenswrapper[4791]: I1007 00:19:44.121845 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:44 crc kubenswrapper[4791]: I1007 00:19:44.121859 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:44 crc kubenswrapper[4791]: I1007 00:19:44.151428 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:44 crc kubenswrapper[4791]: I1007 00:19:44.158601 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" podStartSLOduration=8.158575874 podStartE2EDuration="8.158575874s" podCreationTimestamp="2025-10-07 00:19:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:19:44.155611411 +0000 UTC m=+510.751549062" watchObservedRunningTime="2025-10-07 00:19:44.158575874 +0000 UTC m=+510.754513525" Oct 07 00:19:44 crc kubenswrapper[4791]: I1007 00:19:44.164610 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:19:51 crc kubenswrapper[4791]: I1007 00:19:51.069225 4791 scope.go:117] "RemoveContainer" containerID="102807f499ff8337104e02abb4aabfcac759e06177c81422948fda65e540df1b" Oct 07 00:19:52 crc kubenswrapper[4791]: I1007 00:19:52.191842 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xbjfx_8a389028-af4a-4b2c-a638-04eac9238628/kube-multus/1.log" Oct 07 00:19:52 crc kubenswrapper[4791]: I1007 00:19:52.193040 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xbjfx" event={"ID":"8a389028-af4a-4b2c-a638-04eac9238628","Type":"ContainerStarted","Data":"d52906b1c294a74469b562fb12e56f8513673520b7440aa91705b56681e54ef1"} Oct 07 00:20:07 crc kubenswrapper[4791]: I1007 00:20:07.290495 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-ovn-kubernetes/ovnkube-node-t6hnw" Oct 07 00:20:11 crc kubenswrapper[4791]: I1007 00:20:11.601311 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:20:11 crc kubenswrapper[4791]: I1007 00:20:11.601816 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:20:41 crc kubenswrapper[4791]: I1007 00:20:41.601297 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:20:41 crc kubenswrapper[4791]: I1007 00:20:41.601895 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:20:41 crc kubenswrapper[4791]: I1007 00:20:41.601946 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:20:41 crc kubenswrapper[4791]: I1007 00:20:41.602551 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"31ce44ec022d50902d76172a803393157894bbd037a976f1807bc4f95c7c05c8"} pod="openshift-machine-config-operator/machine-config-daemon-h728c" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 00:20:41 crc kubenswrapper[4791]: I1007 00:20:41.602602 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" containerID="cri-o://31ce44ec022d50902d76172a803393157894bbd037a976f1807bc4f95c7c05c8" gracePeriod=600 Oct 07 00:20:42 crc kubenswrapper[4791]: I1007 00:20:42.507133 4791 generic.go:334] "Generic (PLEG): container finished" podID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerID="31ce44ec022d50902d76172a803393157894bbd037a976f1807bc4f95c7c05c8" exitCode=0 Oct 07 00:20:42 crc kubenswrapper[4791]: I1007 00:20:42.507169 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerDied","Data":"31ce44ec022d50902d76172a803393157894bbd037a976f1807bc4f95c7c05c8"} Oct 07 00:20:42 crc kubenswrapper[4791]: I1007 00:20:42.507665 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"37631d119a9543621b7ec3462a6ba16de3d5a41d64a99b73d2268d35fabb5173"} Oct 07 00:20:42 crc kubenswrapper[4791]: I1007 
00:20:42.507685 4791 scope.go:117] "RemoveContainer" containerID="88997fcb013242215faf11bfd8064aa2df1ecd9fcdd1623ba0f00e9805ada21b" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.037244 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztf96"] Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.038702 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ztf96" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerName="registry-server" containerID="cri-o://244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d" gracePeriod=30 Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.420488 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.533179 4791 generic.go:334] "Generic (PLEG): container finished" podID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerID="244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d" exitCode=0 Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.533253 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztf96" event={"ID":"dc7de000-f1f3-405f-b245-c1fbf2a23a6d","Type":"ContainerDied","Data":"244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d"} Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.533661 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztf96" event={"ID":"dc7de000-f1f3-405f-b245-c1fbf2a23a6d","Type":"ContainerDied","Data":"09c048df2fb91e53b628dc8f333c1a9a852ffe9dfc52bf5c6fb78dbed812dda3"} Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.533266 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztf96" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.533687 4791 scope.go:117] "RemoveContainer" containerID="244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.550009 4791 scope.go:117] "RemoveContainer" containerID="be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.551789 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk484\" (UniqueName: \"kubernetes.io/projected/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-kube-api-access-wk484\") pod \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.551840 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-utilities\") pod \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.551911 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-catalog-content\") pod \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\" (UID: \"dc7de000-f1f3-405f-b245-c1fbf2a23a6d\") " Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.553264 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-utilities" (OuterVolumeSpecName: "utilities") pod "dc7de000-f1f3-405f-b245-c1fbf2a23a6d" (UID: "dc7de000-f1f3-405f-b245-c1fbf2a23a6d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.558388 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-kube-api-access-wk484" (OuterVolumeSpecName: "kube-api-access-wk484") pod "dc7de000-f1f3-405f-b245-c1fbf2a23a6d" (UID: "dc7de000-f1f3-405f-b245-c1fbf2a23a6d"). InnerVolumeSpecName "kube-api-access-wk484". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.564807 4791 scope.go:117] "RemoveContainer" containerID="a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.574776 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc7de000-f1f3-405f-b245-c1fbf2a23a6d" (UID: "dc7de000-f1f3-405f-b245-c1fbf2a23a6d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.595464 4791 scope.go:117] "RemoveContainer" containerID="244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d" Oct 07 00:20:46 crc kubenswrapper[4791]: E1007 00:20:46.596152 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d\": container with ID starting with 244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d not found: ID does not exist" containerID="244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.596199 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d"} err="failed to get container status \"244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d\": rpc error: code = NotFound desc = could not find container \"244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d\": container with ID starting with 244a3a942353bfb40bd996d71389d8bb9f848249fddd520f454d7edda9d4d40d not found: ID does not exist" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.596232 4791 scope.go:117] "RemoveContainer" containerID="be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990" Oct 07 00:20:46 crc kubenswrapper[4791]: E1007 00:20:46.596665 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990\": container with ID starting with be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990 not found: ID does not exist" containerID="be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.596696 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990"} err="failed to get container status \"be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990\": rpc error: code = NotFound desc = could not find container \"be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990\": container with ID starting with be8979dd8d2ea3ff78d4533153b228c0c6a6c7e68c84cd89b32dca283f2a1990 not found: ID does not exist" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.596715 4791 scope.go:117] "RemoveContainer" containerID="a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad" Oct 07 00:20:46 crc kubenswrapper[4791]: E1007 00:20:46.597126 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad\": container with ID starting with a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad not found: ID does not exist" containerID="a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.597173 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad"} err="failed to get container status \"a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad\": rpc error: code = NotFound desc = could not 
find container \"a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad\": container with ID starting with a61a9b909e9f6f635fc7943463a28a9e7c45889f070e88b22c78e6aec67a1cad not found: ID does not exist" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.653881 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk484\" (UniqueName: \"kubernetes.io/projected/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-kube-api-access-wk484\") on node \"crc\" DevicePath \"\"" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.653942 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.653955 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7de000-f1f3-405f-b245-c1fbf2a23a6d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.887182 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztf96"] Oct 07 00:20:46 crc kubenswrapper[4791]: I1007 00:20:46.890920 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztf96"] Oct 07 00:20:48 crc kubenswrapper[4791]: I1007 00:20:48.074933 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" path="/var/lib/kubelet/pods/dc7de000-f1f3-405f-b245-c1fbf2a23a6d/volumes" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.809928 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9"] Oct 07 00:20:49 crc kubenswrapper[4791]: E1007 00:20:49.810778 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerName="extract-content" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.810801 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerName="extract-content" Oct 07 00:20:49 crc kubenswrapper[4791]: E1007 00:20:49.810823 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerName="extract-utilities" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.810842 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerName="extract-utilities" Oct 07 00:20:49 crc kubenswrapper[4791]: E1007 00:20:49.810862 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerName="registry-server" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.810875 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerName="registry-server" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.811083 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc7de000-f1f3-405f-b245-c1fbf2a23a6d" containerName="registry-server" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.812568 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.816137 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.819742 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9"] Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.900077 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qlws\" (UniqueName: \"kubernetes.io/projected/4e7a4cbd-842a-41b7-95f0-934349423df1-kube-api-access-6qlws\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.900150 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:49 crc kubenswrapper[4791]: I1007 00:20:49.900182 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:50 crc kubenswrapper[4791]: I1007 00:20:50.001091 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qlws\" (UniqueName: \"kubernetes.io/projected/4e7a4cbd-842a-41b7-95f0-934349423df1-kube-api-access-6qlws\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:50 crc kubenswrapper[4791]: I1007 00:20:50.001145 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:50 crc kubenswrapper[4791]: I1007 00:20:50.001166 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:50 crc kubenswrapper[4791]: I1007 00:20:50.001740 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:50 crc kubenswrapper[4791]: I1007 00:20:50.001760 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:50 crc kubenswrapper[4791]: I1007 00:20:50.020904 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qlws\" (UniqueName: \"kubernetes.io/projected/4e7a4cbd-842a-41b7-95f0-934349423df1-kube-api-access-6qlws\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:50 crc kubenswrapper[4791]: I1007 00:20:50.129594 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:50 crc kubenswrapper[4791]: I1007 00:20:50.560880 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9"] Oct 07 00:20:51 crc kubenswrapper[4791]: I1007 00:20:51.562813 4791 generic.go:334] "Generic (PLEG): container finished" podID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerID="44774fdcf456f2aecc9a1cc1b763e16de490676cfa2e97c035ebe1b063708bd3" exitCode=0 Oct 07 00:20:51 crc kubenswrapper[4791]: I1007 00:20:51.563086 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" event={"ID":"4e7a4cbd-842a-41b7-95f0-934349423df1","Type":"ContainerDied","Data":"44774fdcf456f2aecc9a1cc1b763e16de490676cfa2e97c035ebe1b063708bd3"} Oct 07 00:20:51 crc kubenswrapper[4791]: I1007 00:20:51.563326 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" event={"ID":"4e7a4cbd-842a-41b7-95f0-934349423df1","Type":"ContainerStarted","Data":"a24d35097b89bf404ebd786a316f5358695155549db7af9481b93de9d7d3d064"} Oct 07 00:20:51 crc kubenswrapper[4791]: I1007 00:20:51.565245 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 00:20:53 crc kubenswrapper[4791]: I1007 00:20:53.578154 4791 generic.go:334] "Generic (PLEG): container finished" podID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerID="3649701f5835a5d424d9a4b8757e777a70ac7095b05f4a3fe9df0a9b2fb31985" exitCode=0 Oct 07 00:20:53 crc kubenswrapper[4791]: I1007 00:20:53.578295 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" event={"ID":"4e7a4cbd-842a-41b7-95f0-934349423df1","Type":"ContainerDied","Data":"3649701f5835a5d424d9a4b8757e777a70ac7095b05f4a3fe9df0a9b2fb31985"} Oct 07 00:20:54 crc kubenswrapper[4791]: I1007 00:20:54.586259 4791 generic.go:334] "Generic (PLEG): container finished" 
podID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerID="23d369415c6a5ca9436da356876baeb02300179e4fecb0f473b764a8a6ce18a5" exitCode=0 Oct 07 00:20:54 crc kubenswrapper[4791]: I1007 00:20:54.586312 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" event={"ID":"4e7a4cbd-842a-41b7-95f0-934349423df1","Type":"ContainerDied","Data":"23d369415c6a5ca9436da356876baeb02300179e4fecb0f473b764a8a6ce18a5"} Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.789962 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.870011 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qlws\" (UniqueName: \"kubernetes.io/projected/4e7a4cbd-842a-41b7-95f0-934349423df1-kube-api-access-6qlws\") pod \"4e7a4cbd-842a-41b7-95f0-934349423df1\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.870085 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-bundle\") pod \"4e7a4cbd-842a-41b7-95f0-934349423df1\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.870133 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-util\") pod \"4e7a4cbd-842a-41b7-95f0-934349423df1\" (UID: \"4e7a4cbd-842a-41b7-95f0-934349423df1\") " Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.872541 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-bundle" (OuterVolumeSpecName: "bundle") pod "4e7a4cbd-842a-41b7-95f0-934349423df1" (UID: "4e7a4cbd-842a-41b7-95f0-934349423df1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.878481 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7a4cbd-842a-41b7-95f0-934349423df1-kube-api-access-6qlws" (OuterVolumeSpecName: "kube-api-access-6qlws") pod "4e7a4cbd-842a-41b7-95f0-934349423df1" (UID: "4e7a4cbd-842a-41b7-95f0-934349423df1"). InnerVolumeSpecName "kube-api-access-6qlws". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.886077 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-util" (OuterVolumeSpecName: "util") pod "4e7a4cbd-842a-41b7-95f0-934349423df1" (UID: "4e7a4cbd-842a-41b7-95f0-934349423df1"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.970768 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.970810 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4e7a4cbd-842a-41b7-95f0-934349423df1-util\") on node \"crc\" DevicePath \"\"" Oct 07 00:20:55 crc kubenswrapper[4791]: I1007 00:20:55.970823 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qlws\" (UniqueName: \"kubernetes.io/projected/4e7a4cbd-842a-41b7-95f0-934349423df1-kube-api-access-6qlws\") on node \"crc\" DevicePath \"\"" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.207264 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj"] Oct 07 00:20:56 crc kubenswrapper[4791]: E1007 00:20:56.208513 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerName="util" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.208531 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerName="util" Oct 07 00:20:56 crc kubenswrapper[4791]: E1007 00:20:56.208542 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerName="extract" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.208549 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerName="extract" Oct 07 00:20:56 crc kubenswrapper[4791]: E1007 00:20:56.208567 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerName="pull" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.208573 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerName="pull" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.208710 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7a4cbd-842a-41b7-95f0-934349423df1" containerName="extract" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.209606 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.219378 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj"] Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.375897 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.376028 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k2mb\" (UniqueName: \"kubernetes.io/projected/385c48b7-e194-421e-a73f-f214f2666ed7-kube-api-access-2k2mb\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.376065 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.476983 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.477114 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k2mb\" (UniqueName: \"kubernetes.io/projected/385c48b7-e194-421e-a73f-f214f2666ed7-kube-api-access-2k2mb\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.477153 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.477806 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " 
pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.477807 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.494683 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k2mb\" (UniqueName: \"kubernetes.io/projected/385c48b7-e194-421e-a73f-f214f2666ed7-kube-api-access-2k2mb\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.522646 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.606803 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" event={"ID":"4e7a4cbd-842a-41b7-95f0-934349423df1","Type":"ContainerDied","Data":"a24d35097b89bf404ebd786a316f5358695155549db7af9481b93de9d7d3d064"} Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.606847 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a24d35097b89bf404ebd786a316f5358695155549db7af9481b93de9d7d3d064" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.606956 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9" Oct 07 00:20:56 crc kubenswrapper[4791]: I1007 00:20:56.691165 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj"] Oct 07 00:20:56 crc kubenswrapper[4791]: W1007 00:20:56.697240 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod385c48b7_e194_421e_a73f_f214f2666ed7.slice/crio-29d41d99220ac4e2a758d4e1fc85155e8e3fe915a369b494274c3677ea8a0cb4 WatchSource:0}: Error finding container 29d41d99220ac4e2a758d4e1fc85155e8e3fe915a369b494274c3677ea8a0cb4: Status 404 returned error can't find the container with id 29d41d99220ac4e2a758d4e1fc85155e8e3fe915a369b494274c3677ea8a0cb4 Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.006309 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x"] Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.007776 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.016577 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x"] Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.085772 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-bundle\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.085867 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crrdh\" (UniqueName: \"kubernetes.io/projected/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-kube-api-access-crrdh\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.085901 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-util\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.187072 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-bundle\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.187156 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crrdh\" (UniqueName: \"kubernetes.io/projected/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-kube-api-access-crrdh\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.187186 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-util\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.187735 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-bundle\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " 
pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.187765 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-util\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.207258 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crrdh\" (UniqueName: \"kubernetes.io/projected/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-kube-api-access-crrdh\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.369971 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.547838 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x"] Oct 07 00:20:57 crc kubenswrapper[4791]: W1007 00:20:57.554491 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3293c35c_f4ba_4d9c_9f1a_5cf9620f7dd2.slice/crio-5eb07f7c0c3e50345070638a98b5902d8014f20d18558d8dd47869bac13d0156 WatchSource:0}: Error finding container 5eb07f7c0c3e50345070638a98b5902d8014f20d18558d8dd47869bac13d0156: Status 404 returned error can't find the container with id 5eb07f7c0c3e50345070638a98b5902d8014f20d18558d8dd47869bac13d0156 Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.613255 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" event={"ID":"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2","Type":"ContainerStarted","Data":"5eb07f7c0c3e50345070638a98b5902d8014f20d18558d8dd47869bac13d0156"} Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.615113 4791 generic.go:334] "Generic (PLEG): container finished" podID="385c48b7-e194-421e-a73f-f214f2666ed7" containerID="547dc8b9206df80c138a069f2b5ebc6eb47fb48efba2fabbff85f7fb24cde6ad" exitCode=0 Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.615157 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" event={"ID":"385c48b7-e194-421e-a73f-f214f2666ed7","Type":"ContainerDied","Data":"547dc8b9206df80c138a069f2b5ebc6eb47fb48efba2fabbff85f7fb24cde6ad"} Oct 07 00:20:57 crc kubenswrapper[4791]: I1007 00:20:57.615177 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" event={"ID":"385c48b7-e194-421e-a73f-f214f2666ed7","Type":"ContainerStarted","Data":"29d41d99220ac4e2a758d4e1fc85155e8e3fe915a369b494274c3677ea8a0cb4"} Oct 07 00:20:58 crc kubenswrapper[4791]: I1007 00:20:58.621411 4791 generic.go:334] "Generic (PLEG): container finished" podID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" 
containerID="272d41324d34cae507291251406b2f9e1d08e329732e97ed2d8470cb2a7c4f75" exitCode=0 Oct 07 00:20:58 crc kubenswrapper[4791]: I1007 00:20:58.621489 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" event={"ID":"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2","Type":"ContainerDied","Data":"272d41324d34cae507291251406b2f9e1d08e329732e97ed2d8470cb2a7c4f75"} Oct 07 00:20:59 crc kubenswrapper[4791]: I1007 00:20:59.631474 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" event={"ID":"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2","Type":"ContainerStarted","Data":"f9e9f01641857e0109407fbd19c2802abc7d3358de87a7ea194908b0918bc5da"} Oct 07 00:20:59 crc kubenswrapper[4791]: I1007 00:20:59.634213 4791 generic.go:334] "Generic (PLEG): container finished" podID="385c48b7-e194-421e-a73f-f214f2666ed7" containerID="aa42182adf9dc27dad7a311d1e4112d4c76aa23308cc09aaee8130f92bcf663d" exitCode=0 Oct 07 00:20:59 crc kubenswrapper[4791]: I1007 00:20:59.634269 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" event={"ID":"385c48b7-e194-421e-a73f-f214f2666ed7","Type":"ContainerDied","Data":"aa42182adf9dc27dad7a311d1e4112d4c76aa23308cc09aaee8130f92bcf663d"} Oct 07 00:21:00 crc kubenswrapper[4791]: I1007 00:21:00.640773 4791 generic.go:334] "Generic (PLEG): container finished" podID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerID="f9e9f01641857e0109407fbd19c2802abc7d3358de87a7ea194908b0918bc5da" exitCode=0 Oct 07 00:21:00 crc kubenswrapper[4791]: I1007 00:21:00.640848 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" event={"ID":"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2","Type":"ContainerDied","Data":"f9e9f01641857e0109407fbd19c2802abc7d3358de87a7ea194908b0918bc5da"} Oct 07 00:21:00 crc kubenswrapper[4791]: I1007 00:21:00.643551 4791 generic.go:334] "Generic (PLEG): container finished" podID="385c48b7-e194-421e-a73f-f214f2666ed7" containerID="a2149b8c2487405a34493d83a612e43ef9bbf296fe95be9435d8708ec21d555f" exitCode=0 Oct 07 00:21:00 crc kubenswrapper[4791]: I1007 00:21:00.643588 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" event={"ID":"385c48b7-e194-421e-a73f-f214f2666ed7","Type":"ContainerDied","Data":"a2149b8c2487405a34493d83a612e43ef9bbf296fe95be9435d8708ec21d555f"} Oct 07 00:21:01 crc kubenswrapper[4791]: I1007 00:21:01.652449 4791 generic.go:334] "Generic (PLEG): container finished" podID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerID="10fcc064a21492e09d392340a4018b885c971539e9d0ed492da198326aac9c98" exitCode=0 Oct 07 00:21:01 crc kubenswrapper[4791]: I1007 00:21:01.653370 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" event={"ID":"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2","Type":"ContainerDied","Data":"10fcc064a21492e09d392340a4018b885c971539e9d0ed492da198326aac9c98"} Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.023276 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.157027 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-util\") pod \"385c48b7-e194-421e-a73f-f214f2666ed7\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.157190 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2k2mb\" (UniqueName: \"kubernetes.io/projected/385c48b7-e194-421e-a73f-f214f2666ed7-kube-api-access-2k2mb\") pod \"385c48b7-e194-421e-a73f-f214f2666ed7\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.157225 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-bundle\") pod \"385c48b7-e194-421e-a73f-f214f2666ed7\" (UID: \"385c48b7-e194-421e-a73f-f214f2666ed7\") " Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.157986 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-bundle" (OuterVolumeSpecName: "bundle") pod "385c48b7-e194-421e-a73f-f214f2666ed7" (UID: "385c48b7-e194-421e-a73f-f214f2666ed7"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.176602 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/385c48b7-e194-421e-a73f-f214f2666ed7-kube-api-access-2k2mb" (OuterVolumeSpecName: "kube-api-access-2k2mb") pod "385c48b7-e194-421e-a73f-f214f2666ed7" (UID: "385c48b7-e194-421e-a73f-f214f2666ed7"). InnerVolumeSpecName "kube-api-access-2k2mb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.259204 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2k2mb\" (UniqueName: \"kubernetes.io/projected/385c48b7-e194-421e-a73f-f214f2666ed7-kube-api-access-2k2mb\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.259258 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.368009 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-util" (OuterVolumeSpecName: "util") pod "385c48b7-e194-421e-a73f-f214f2666ed7" (UID: "385c48b7-e194-421e-a73f-f214f2666ed7"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.462028 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/385c48b7-e194-421e-a73f-f214f2666ed7-util\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.661201 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.661225 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj" event={"ID":"385c48b7-e194-421e-a73f-f214f2666ed7","Type":"ContainerDied","Data":"29d41d99220ac4e2a758d4e1fc85155e8e3fe915a369b494274c3677ea8a0cb4"} Oct 07 00:21:02 crc kubenswrapper[4791]: I1007 00:21:02.661274 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29d41d99220ac4e2a758d4e1fc85155e8e3fe915a369b494274c3677ea8a0cb4" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.028002 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.194117 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-bundle\") pod \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.194250 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crrdh\" (UniqueName: \"kubernetes.io/projected/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-kube-api-access-crrdh\") pod \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.194441 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-util\") pod \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\" (UID: \"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2\") " Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.195368 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-bundle" (OuterVolumeSpecName: "bundle") pod "3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" (UID: "3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.199727 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.215743 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-kube-api-access-crrdh" (OuterVolumeSpecName: "kube-api-access-crrdh") pod "3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" (UID: "3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2"). InnerVolumeSpecName "kube-api-access-crrdh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.300823 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crrdh\" (UniqueName: \"kubernetes.io/projected/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-kube-api-access-crrdh\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.523511 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-util" (OuterVolumeSpecName: "util") pod "3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" (UID: "3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.603507 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2-util\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.673510 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" event={"ID":"3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2","Type":"ContainerDied","Data":"5eb07f7c0c3e50345070638a98b5902d8014f20d18558d8dd47869bac13d0156"} Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.673561 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5eb07f7c0c3e50345070638a98b5902d8014f20d18558d8dd47869bac13d0156" Oct 07 00:21:03 crc kubenswrapper[4791]: I1007 00:21:03.673655 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.226129 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj"] Oct 07 00:21:05 crc kubenswrapper[4791]: E1007 00:21:05.227141 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="385c48b7-e194-421e-a73f-f214f2666ed7" containerName="util" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.227161 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="385c48b7-e194-421e-a73f-f214f2666ed7" containerName="util" Oct 07 00:21:05 crc kubenswrapper[4791]: E1007 00:21:05.227182 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerName="pull" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.227190 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerName="pull" Oct 07 00:21:05 crc kubenswrapper[4791]: E1007 00:21:05.227199 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerName="extract" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.227208 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerName="extract" Oct 07 00:21:05 crc kubenswrapper[4791]: E1007 00:21:05.227219 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="385c48b7-e194-421e-a73f-f214f2666ed7" containerName="extract" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.227226 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="385c48b7-e194-421e-a73f-f214f2666ed7" containerName="extract" Oct 07 00:21:05 crc 
kubenswrapper[4791]: E1007 00:21:05.227240 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerName="util" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.227250 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerName="util" Oct 07 00:21:05 crc kubenswrapper[4791]: E1007 00:21:05.227263 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="385c48b7-e194-421e-a73f-f214f2666ed7" containerName="pull" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.227270 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="385c48b7-e194-421e-a73f-f214f2666ed7" containerName="pull" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.227415 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="385c48b7-e194-421e-a73f-f214f2666ed7" containerName="extract" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.227435 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2" containerName="extract" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.228568 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.231861 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.246515 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj"] Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.430316 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.430466 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbb5z\" (UniqueName: \"kubernetes.io/projected/637b38c4-c723-4e98-afd3-897f73d13259-kube-api-access-wbb5z\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.430502 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.531444 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " 
pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.531553 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.531641 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbb5z\" (UniqueName: \"kubernetes.io/projected/637b38c4-c723-4e98-afd3-897f73d13259-kube-api-access-wbb5z\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.532036 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.532177 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.553082 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbb5z\" (UniqueName: \"kubernetes.io/projected/637b38c4-c723-4e98-afd3-897f73d13259-kube-api-access-wbb5z\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:05 crc kubenswrapper[4791]: I1007 00:21:05.847494 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.334656 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.535048 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.536100 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.538719 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-f6fnw" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.541006 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.541279 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.556730 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.599479 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.600214 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.608069 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.608416 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-cmdq8" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.618595 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.619582 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.621306 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.645570 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcr68\" (UniqueName: \"kubernetes.io/projected/f3a541f9-9f16-46d8-bf15-61223084be30-kube-api-access-pcr68\") pod \"obo-prometheus-operator-7c8cf85677-7n58g\" (UID: \"f3a541f9-9f16-46d8-bf15-61223084be30\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.654553 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.693225 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" event={"ID":"637b38c4-c723-4e98-afd3-897f73d13259","Type":"ContainerStarted","Data":"52fc0f986a216bc11174c72829b19cec2617d5a892c1bee9a149ab58c10db6d2"} Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.693565 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" event={"ID":"637b38c4-c723-4e98-afd3-897f73d13259","Type":"ContainerStarted","Data":"ae8781021e3c4d104c0cf2baf819bd4e4cf31b392b724b0b3a40f99c6b28c650"} Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.746713 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5a672a12-8d07-4ce4-a94e-b3e66473f35c-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr\" (UID: \"5a672a12-8d07-4ce4-a94e-b3e66473f35c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.746786 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcr68\" (UniqueName: \"kubernetes.io/projected/f3a541f9-9f16-46d8-bf15-61223084be30-kube-api-access-pcr68\") pod \"obo-prometheus-operator-7c8cf85677-7n58g\" (UID: \"f3a541f9-9f16-46d8-bf15-61223084be30\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.746855 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d71a9a4d-ce1e-4b77-943c-33bdf244ddc5-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd\" (UID: \"d71a9a4d-ce1e-4b77-943c-33bdf244ddc5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.746879 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5a672a12-8d07-4ce4-a94e-b3e66473f35c-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr\" (UID: \"5a672a12-8d07-4ce4-a94e-b3e66473f35c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" 
Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.746957 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d71a9a4d-ce1e-4b77-943c-33bdf244ddc5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd\" (UID: \"d71a9a4d-ce1e-4b77-943c-33bdf244ddc5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.758880 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-bcfdp"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.759835 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.762308 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-2k8j9" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.762514 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.773027 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcr68\" (UniqueName: \"kubernetes.io/projected/f3a541f9-9f16-46d8-bf15-61223084be30-kube-api-access-pcr68\") pod \"obo-prometheus-operator-7c8cf85677-7n58g\" (UID: \"f3a541f9-9f16-46d8-bf15-61223084be30\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.782814 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-bcfdp"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.849485 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d71a9a4d-ce1e-4b77-943c-33bdf244ddc5-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd\" (UID: \"d71a9a4d-ce1e-4b77-943c-33bdf244ddc5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.849548 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5a672a12-8d07-4ce4-a94e-b3e66473f35c-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr\" (UID: \"5a672a12-8d07-4ce4-a94e-b3e66473f35c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.849590 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-bcfdp\" (UID: \"15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6\") " pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.849620 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t98nj\" (UniqueName: \"kubernetes.io/projected/15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6-kube-api-access-t98nj\") pod 
\"observability-operator-cc5f78dfc-bcfdp\" (UID: \"15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6\") " pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.849679 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d71a9a4d-ce1e-4b77-943c-33bdf244ddc5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd\" (UID: \"d71a9a4d-ce1e-4b77-943c-33bdf244ddc5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.849740 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5a672a12-8d07-4ce4-a94e-b3e66473f35c-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr\" (UID: \"5a672a12-8d07-4ce4-a94e-b3e66473f35c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.852434 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.852940 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d71a9a4d-ce1e-4b77-943c-33bdf244ddc5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd\" (UID: \"d71a9a4d-ce1e-4b77-943c-33bdf244ddc5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.853488 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5a672a12-8d07-4ce4-a94e-b3e66473f35c-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr\" (UID: \"5a672a12-8d07-4ce4-a94e-b3e66473f35c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.872185 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5a672a12-8d07-4ce4-a94e-b3e66473f35c-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr\" (UID: \"5a672a12-8d07-4ce4-a94e-b3e66473f35c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.872194 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d71a9a4d-ce1e-4b77-943c-33bdf244ddc5-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd\" (UID: \"d71a9a4d-ce1e-4b77-943c-33bdf244ddc5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.920217 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.936013 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.950843 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t98nj\" (UniqueName: \"kubernetes.io/projected/15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6-kube-api-access-t98nj\") pod \"observability-operator-cc5f78dfc-bcfdp\" (UID: \"15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6\") " pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.950990 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-bcfdp\" (UID: \"15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6\") " pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.964768 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-bcfdp\" (UID: \"15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6\") " pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.966606 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-q5556"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.967532 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.974686 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-4glz7" Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.980737 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-q5556"] Oct 07 00:21:06 crc kubenswrapper[4791]: I1007 00:21:06.989076 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t98nj\" (UniqueName: \"kubernetes.io/projected/15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6-kube-api-access-t98nj\") pod \"observability-operator-cc5f78dfc-bcfdp\" (UID: \"15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6\") " pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.081833 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.161741 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtb95\" (UniqueName: \"kubernetes.io/projected/84d5cca0-920e-4cc6-ae49-a848e3255ab7-kube-api-access-vtb95\") pod \"perses-operator-54bc95c9fb-q5556\" (UID: \"84d5cca0-920e-4cc6-ae49-a848e3255ab7\") " pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.162187 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/84d5cca0-920e-4cc6-ae49-a848e3255ab7-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-q5556\" (UID: \"84d5cca0-920e-4cc6-ae49-a848e3255ab7\") " pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.264506 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/84d5cca0-920e-4cc6-ae49-a848e3255ab7-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-q5556\" (UID: \"84d5cca0-920e-4cc6-ae49-a848e3255ab7\") " pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.264583 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtb95\" (UniqueName: \"kubernetes.io/projected/84d5cca0-920e-4cc6-ae49-a848e3255ab7-kube-api-access-vtb95\") pod \"perses-operator-54bc95c9fb-q5556\" (UID: \"84d5cca0-920e-4cc6-ae49-a848e3255ab7\") " pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.265695 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/84d5cca0-920e-4cc6-ae49-a848e3255ab7-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-q5556\" (UID: \"84d5cca0-920e-4cc6-ae49-a848e3255ab7\") " pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.290258 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtb95\" (UniqueName: \"kubernetes.io/projected/84d5cca0-920e-4cc6-ae49-a848e3255ab7-kube-api-access-vtb95\") pod \"perses-operator-54bc95c9fb-q5556\" (UID: \"84d5cca0-920e-4cc6-ae49-a848e3255ab7\") " pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.349061 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.361341 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g"] Oct 07 00:21:07 crc kubenswrapper[4791]: W1007 00:21:07.368478 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3a541f9_9f16_46d8_bf15_61223084be30.slice/crio-985eb536e3f8911b303857ef71b86d117bffa8868179b4838a933c7e6410ee62 WatchSource:0}: Error finding container 985eb536e3f8911b303857ef71b86d117bffa8868179b4838a933c7e6410ee62: Status 404 returned error can't find the container with id 985eb536e3f8911b303857ef71b86d117bffa8868179b4838a933c7e6410ee62 Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.407793 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr"] Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.439310 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-bcfdp"] Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.563447 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd"] Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.688695 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-q5556"] Oct 07 00:21:07 crc kubenswrapper[4791]: W1007 00:21:07.699057 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84d5cca0_920e_4cc6_ae49_a848e3255ab7.slice/crio-f7769ae83acdd710c73ce7dbcf7342a582adeafd2ca1600d95025b717144d7f5 WatchSource:0}: Error finding container f7769ae83acdd710c73ce7dbcf7342a582adeafd2ca1600d95025b717144d7f5: Status 404 returned error can't find the container with id f7769ae83acdd710c73ce7dbcf7342a582adeafd2ca1600d95025b717144d7f5 Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.704519 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" event={"ID":"d71a9a4d-ce1e-4b77-943c-33bdf244ddc5","Type":"ContainerStarted","Data":"a8feafefc977a2cd8c960d838fb8de1db6be52250c437062328c2ad495e8a531"} Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.707761 4791 generic.go:334] "Generic (PLEG): container finished" podID="637b38c4-c723-4e98-afd3-897f73d13259" containerID="52fc0f986a216bc11174c72829b19cec2617d5a892c1bee9a149ab58c10db6d2" exitCode=0 Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.707828 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" event={"ID":"637b38c4-c723-4e98-afd3-897f73d13259","Type":"ContainerDied","Data":"52fc0f986a216bc11174c72829b19cec2617d5a892c1bee9a149ab58c10db6d2"} Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.711454 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" event={"ID":"f3a541f9-9f16-46d8-bf15-61223084be30","Type":"ContainerStarted","Data":"985eb536e3f8911b303857ef71b86d117bffa8868179b4838a933c7e6410ee62"} Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.714960 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" event={"ID":"5a672a12-8d07-4ce4-a94e-b3e66473f35c","Type":"ContainerStarted","Data":"8f51a04b66c3599eb2d06397c93cd9e7f2c4b0651c535971a060fb6b98db9c9d"} Oct 07 00:21:07 crc kubenswrapper[4791]: I1007 00:21:07.716894 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" event={"ID":"15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6","Type":"ContainerStarted","Data":"45e80418caa63369014cd64d1795e8835811470536dadb13d063f6603409e6c2"} Oct 07 00:21:08 crc kubenswrapper[4791]: I1007 00:21:08.749661 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-q5556" event={"ID":"84d5cca0-920e-4cc6-ae49-a848e3255ab7","Type":"ContainerStarted","Data":"f7769ae83acdd710c73ce7dbcf7342a582adeafd2ca1600d95025b717144d7f5"} Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.507584 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-8jhw8"] Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.509653 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-8jhw8" Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.515529 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.515613 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.516378 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-5fz45" Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.530630 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-8jhw8"] Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.670241 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlzl2\" (UniqueName: \"kubernetes.io/projected/85eafc77-d412-45d0-98bc-412413cc803b-kube-api-access-nlzl2\") pod \"interconnect-operator-5bb49f789d-8jhw8\" (UID: \"85eafc77-d412-45d0-98bc-412413cc803b\") " pod="service-telemetry/interconnect-operator-5bb49f789d-8jhw8" Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.771378 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlzl2\" (UniqueName: \"kubernetes.io/projected/85eafc77-d412-45d0-98bc-412413cc803b-kube-api-access-nlzl2\") pod \"interconnect-operator-5bb49f789d-8jhw8\" (UID: \"85eafc77-d412-45d0-98bc-412413cc803b\") " pod="service-telemetry/interconnect-operator-5bb49f789d-8jhw8" Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.800208 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlzl2\" (UniqueName: \"kubernetes.io/projected/85eafc77-d412-45d0-98bc-412413cc803b-kube-api-access-nlzl2\") pod \"interconnect-operator-5bb49f789d-8jhw8\" (UID: \"85eafc77-d412-45d0-98bc-412413cc803b\") " pod="service-telemetry/interconnect-operator-5bb49f789d-8jhw8" Oct 07 00:21:12 crc kubenswrapper[4791]: I1007 00:21:12.847101 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-8jhw8" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.713432 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-6c6d7577b7-pd95l"] Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.714943 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.718956 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.719811 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-h5bk9" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.727379 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-6c6d7577b7-pd95l"] Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.820234 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpqmk\" (UniqueName: \"kubernetes.io/projected/731b2406-13b4-466a-889f-5fc62bf64ed3-kube-api-access-lpqmk\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.820311 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/731b2406-13b4-466a-889f-5fc62bf64ed3-webhook-cert\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.820419 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/731b2406-13b4-466a-889f-5fc62bf64ed3-apiservice-cert\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.922125 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpqmk\" (UniqueName: \"kubernetes.io/projected/731b2406-13b4-466a-889f-5fc62bf64ed3-kube-api-access-lpqmk\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.922192 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/731b2406-13b4-466a-889f-5fc62bf64ed3-webhook-cert\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.922232 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/731b2406-13b4-466a-889f-5fc62bf64ed3-apiservice-cert\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: 
I1007 00:21:15.944517 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/731b2406-13b4-466a-889f-5fc62bf64ed3-webhook-cert\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.959008 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpqmk\" (UniqueName: \"kubernetes.io/projected/731b2406-13b4-466a-889f-5fc62bf64ed3-kube-api-access-lpqmk\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:15 crc kubenswrapper[4791]: I1007 00:21:15.962298 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/731b2406-13b4-466a-889f-5fc62bf64ed3-apiservice-cert\") pod \"elastic-operator-6c6d7577b7-pd95l\" (UID: \"731b2406-13b4-466a-889f-5fc62bf64ed3\") " pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:16 crc kubenswrapper[4791]: I1007 00:21:16.035163 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" Oct 07 00:21:24 crc kubenswrapper[4791]: E1007 00:21:24.494298 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5" Oct 07 00:21:24 crc kubenswrapper[4791]: E1007 00:21:24.495232 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:8597c48fc71fc6ec8e87dbe40dace4dbb7b817c1039db608af76a0d90f7ac2d0,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pcr68,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-7c8cf85677-7n58g_openshift-operators(f3a541f9-9f16-46d8-bf15-61223084be30): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 00:21:24 crc kubenswrapper[4791]: E1007 00:21:24.496470 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" podUID="f3a541f9-9f16-46d8-bf15-61223084be30" Oct 07 00:21:24 crc kubenswrapper[4791]: I1007 00:21:24.850723 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-8jhw8"] Oct 07 00:21:24 crc kubenswrapper[4791]: I1007 00:21:24.986708 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" event={"ID":"15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6","Type":"ContainerStarted","Data":"7d6900075664bf79222718b927b3887971356454932ccff2e5244474eeac67cf"} Oct 07 00:21:24 crc kubenswrapper[4791]: I1007 00:21:24.988714 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:24 crc kubenswrapper[4791]: I1007 00:21:24.988786 4791 patch_prober.go:28] interesting pod/observability-operator-cc5f78dfc-bcfdp container/operator namespace/openshift-operators: Readiness probe status=failure output="Get \"http://10.217.0.47:8081/healthz\": dial tcp 10.217.0.47:8081: connect: connection refused" start-of-body= Oct 07 00:21:24 crc kubenswrapper[4791]: I1007 00:21:24.988814 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" podUID="15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6" containerName="operator" probeResult="failure" output="Get \"http://10.217.0.47:8081/healthz\": dial tcp 10.217.0.47:8081: connect: connection refused" Oct 07 00:21:24 crc kubenswrapper[4791]: I1007 00:21:24.996468 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-6c6d7577b7-pd95l"] Oct 07 00:21:25 crc kubenswrapper[4791]: I1007 00:21:25.008194 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" event={"ID":"d71a9a4d-ce1e-4b77-943c-33bdf244ddc5","Type":"ContainerStarted","Data":"33b19e2716e94140aae69004ad8168b1bbfc9f9c66f727ede0ae8cf406c4cddd"} Oct 07 
00:21:25 crc kubenswrapper[4791]: I1007 00:21:25.020048 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-8jhw8" event={"ID":"85eafc77-d412-45d0-98bc-412413cc803b","Type":"ContainerStarted","Data":"76575de5fa1547040eadaa057a21bdbabe33b1c7329abc3256178bc562e95c68"} Oct 07 00:21:25 crc kubenswrapper[4791]: I1007 00:21:25.028752 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-q5556" event={"ID":"84d5cca0-920e-4cc6-ae49-a848e3255ab7","Type":"ContainerStarted","Data":"00bac7721bbe074831e5030abf1e9947e2a01171b3b1690aaa50a55d0f0a4873"} Oct 07 00:21:25 crc kubenswrapper[4791]: I1007 00:21:25.029269 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:25 crc kubenswrapper[4791]: I1007 00:21:25.031273 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" event={"ID":"637b38c4-c723-4e98-afd3-897f73d13259","Type":"ContainerStarted","Data":"b15e56fc8da0fd43848fb0da5be7d1187d8223befe8dac7c6fb0655f9b0c4500"} Oct 07 00:21:25 crc kubenswrapper[4791]: E1007 00:21:25.031618 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5\\\"\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" podUID="f3a541f9-9f16-46d8-bf15-61223084be30" Oct 07 00:21:25 crc kubenswrapper[4791]: I1007 00:21:25.040551 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" podStartSLOduration=1.9341832829999999 podStartE2EDuration="19.040527526s" podCreationTimestamp="2025-10-07 00:21:06 +0000 UTC" firstStartedPulling="2025-10-07 00:21:07.455114763 +0000 UTC m=+594.051052414" lastFinishedPulling="2025-10-07 00:21:24.561459006 +0000 UTC m=+611.157396657" observedRunningTime="2025-10-07 00:21:25.028809353 +0000 UTC m=+611.624747004" watchObservedRunningTime="2025-10-07 00:21:25.040527526 +0000 UTC m=+611.636465177" Oct 07 00:21:25 crc kubenswrapper[4791]: I1007 00:21:25.078026 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd" podStartSLOduration=2.167896707 podStartE2EDuration="19.078002335s" podCreationTimestamp="2025-10-07 00:21:06 +0000 UTC" firstStartedPulling="2025-10-07 00:21:07.587680314 +0000 UTC m=+594.183617965" lastFinishedPulling="2025-10-07 00:21:24.497785942 +0000 UTC m=+611.093723593" observedRunningTime="2025-10-07 00:21:25.074750134 +0000 UTC m=+611.670687795" watchObservedRunningTime="2025-10-07 00:21:25.078002335 +0000 UTC m=+611.673939986" Oct 07 00:21:25 crc kubenswrapper[4791]: I1007 00:21:25.115958 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-q5556" podStartSLOduration=2.2987086899999998 podStartE2EDuration="19.115940503s" podCreationTimestamp="2025-10-07 00:21:06 +0000 UTC" firstStartedPulling="2025-10-07 00:21:07.70378171 +0000 UTC m=+594.299719361" lastFinishedPulling="2025-10-07 00:21:24.521013523 +0000 UTC m=+611.116951174" observedRunningTime="2025-10-07 
00:21:25.114175505 +0000 UTC m=+611.710113166" watchObservedRunningTime="2025-10-07 00:21:25.115940503 +0000 UTC m=+611.711878154" Oct 07 00:21:26 crc kubenswrapper[4791]: I1007 00:21:26.044948 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" event={"ID":"731b2406-13b4-466a-889f-5fc62bf64ed3","Type":"ContainerStarted","Data":"642adab03b4454e59c4b7af9c02916cb54cd726cfba5e58829fa91da428cc2cc"} Oct 07 00:21:26 crc kubenswrapper[4791]: I1007 00:21:26.054794 4791 generic.go:334] "Generic (PLEG): container finished" podID="637b38c4-c723-4e98-afd3-897f73d13259" containerID="b15e56fc8da0fd43848fb0da5be7d1187d8223befe8dac7c6fb0655f9b0c4500" exitCode=0 Oct 07 00:21:26 crc kubenswrapper[4791]: I1007 00:21:26.054861 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" event={"ID":"637b38c4-c723-4e98-afd3-897f73d13259","Type":"ContainerDied","Data":"b15e56fc8da0fd43848fb0da5be7d1187d8223befe8dac7c6fb0655f9b0c4500"} Oct 07 00:21:26 crc kubenswrapper[4791]: I1007 00:21:26.061938 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" event={"ID":"5a672a12-8d07-4ce4-a94e-b3e66473f35c","Type":"ContainerStarted","Data":"881f31046ac28215f3527bb6738de9ea038b0b766c84177b998007766a4f4459"} Oct 07 00:21:26 crc kubenswrapper[4791]: I1007 00:21:26.093771 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-bcfdp" Oct 07 00:21:26 crc kubenswrapper[4791]: I1007 00:21:26.174753 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr" podStartSLOduration=3.127586989 podStartE2EDuration="20.174734525s" podCreationTimestamp="2025-10-07 00:21:06 +0000 UTC" firstStartedPulling="2025-10-07 00:21:07.45173967 +0000 UTC m=+594.047677321" lastFinishedPulling="2025-10-07 00:21:24.498887206 +0000 UTC m=+611.094824857" observedRunningTime="2025-10-07 00:21:26.172218911 +0000 UTC m=+612.768156562" watchObservedRunningTime="2025-10-07 00:21:26.174734525 +0000 UTC m=+612.770672176" Oct 07 00:21:27 crc kubenswrapper[4791]: I1007 00:21:27.070069 4791 generic.go:334] "Generic (PLEG): container finished" podID="637b38c4-c723-4e98-afd3-897f73d13259" containerID="d1c0a346a1561a16ac7cce5f360112cffbc7fc6cc7fa85bc1486ffc53625ca85" exitCode=0 Oct 07 00:21:27 crc kubenswrapper[4791]: I1007 00:21:27.070117 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" event={"ID":"637b38c4-c723-4e98-afd3-897f73d13259","Type":"ContainerDied","Data":"d1c0a346a1561a16ac7cce5f360112cffbc7fc6cc7fa85bc1486ffc53625ca85"} Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.442061 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.465570 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-bundle\") pod \"637b38c4-c723-4e98-afd3-897f73d13259\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.465762 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-util\") pod \"637b38c4-c723-4e98-afd3-897f73d13259\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.465852 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbb5z\" (UniqueName: \"kubernetes.io/projected/637b38c4-c723-4e98-afd3-897f73d13259-kube-api-access-wbb5z\") pod \"637b38c4-c723-4e98-afd3-897f73d13259\" (UID: \"637b38c4-c723-4e98-afd3-897f73d13259\") " Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.467726 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-bundle" (OuterVolumeSpecName: "bundle") pod "637b38c4-c723-4e98-afd3-897f73d13259" (UID: "637b38c4-c723-4e98-afd3-897f73d13259"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.476363 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-util" (OuterVolumeSpecName: "util") pod "637b38c4-c723-4e98-afd3-897f73d13259" (UID: "637b38c4-c723-4e98-afd3-897f73d13259"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.476718 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/637b38c4-c723-4e98-afd3-897f73d13259-kube-api-access-wbb5z" (OuterVolumeSpecName: "kube-api-access-wbb5z") pod "637b38c4-c723-4e98-afd3-897f73d13259" (UID: "637b38c4-c723-4e98-afd3-897f73d13259"). InnerVolumeSpecName "kube-api-access-wbb5z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.566972 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbb5z\" (UniqueName: \"kubernetes.io/projected/637b38c4-c723-4e98-afd3-897f73d13259-kube-api-access-wbb5z\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.567467 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:29 crc kubenswrapper[4791]: I1007 00:21:29.567478 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/637b38c4-c723-4e98-afd3-897f73d13259-util\") on node \"crc\" DevicePath \"\"" Oct 07 00:21:30 crc kubenswrapper[4791]: I1007 00:21:30.093667 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" event={"ID":"637b38c4-c723-4e98-afd3-897f73d13259","Type":"ContainerDied","Data":"ae8781021e3c4d104c0cf2baf819bd4e4cf31b392b724b0b3a40f99c6b28c650"} Oct 07 00:21:30 crc kubenswrapper[4791]: I1007 00:21:30.093714 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae8781021e3c4d104c0cf2baf819bd4e4cf31b392b724b0b3a40f99c6b28c650" Oct 07 00:21:30 crc kubenswrapper[4791]: I1007 00:21:30.093794 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj" Oct 07 00:21:30 crc kubenswrapper[4791]: I1007 00:21:30.110927 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" event={"ID":"731b2406-13b4-466a-889f-5fc62bf64ed3","Type":"ContainerStarted","Data":"e29d1ee9bf405163b53f9941558a320e0a037bb634aa3f7467d0e6b8e2650511"} Oct 07 00:21:30 crc kubenswrapper[4791]: I1007 00:21:30.150622 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-6c6d7577b7-pd95l" podStartSLOduration=10.770389048 podStartE2EDuration="15.150585973s" podCreationTimestamp="2025-10-07 00:21:15 +0000 UTC" firstStartedPulling="2025-10-07 00:21:25.04999508 +0000 UTC m=+611.645932731" lastFinishedPulling="2025-10-07 00:21:29.430192005 +0000 UTC m=+616.026129656" observedRunningTime="2025-10-07 00:21:30.148461657 +0000 UTC m=+616.744399308" watchObservedRunningTime="2025-10-07 00:21:30.150585973 +0000 UTC m=+616.746523634" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.054614 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 07 00:21:32 crc kubenswrapper[4791]: E1007 00:21:32.055259 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637b38c4-c723-4e98-afd3-897f73d13259" containerName="extract" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.055274 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="637b38c4-c723-4e98-afd3-897f73d13259" containerName="extract" Oct 07 00:21:32 crc kubenswrapper[4791]: E1007 00:21:32.055290 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637b38c4-c723-4e98-afd3-897f73d13259" containerName="util" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.055296 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="637b38c4-c723-4e98-afd3-897f73d13259" containerName="util" Oct 07 00:21:32 
crc kubenswrapper[4791]: E1007 00:21:32.055312 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637b38c4-c723-4e98-afd3-897f73d13259" containerName="pull" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.055319 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="637b38c4-c723-4e98-afd3-897f73d13259" containerName="pull" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.055440 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="637b38c4-c723-4e98-afd3-897f73d13259" containerName="extract" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.056361 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.059188 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.060123 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-rqwjf" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.061128 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.061235 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.061373 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.061732 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.063279 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.065439 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.066710 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.090284 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.099248 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.099560 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 
00:21:32.099653 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.099723 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.099794 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.099858 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.099947 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.100033 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.100111 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.100191 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 
crc kubenswrapper[4791]: I1007 00:21:32.100269 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.100369 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.100478 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.100554 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.100618 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201732 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201793 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201810 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201839 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201863 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201882 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201907 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201935 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201951 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201971 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.201991 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.202009 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: 
\"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.202025 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.202049 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.202064 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.202371 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.204530 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.204547 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.204642 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.204689 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc 
kubenswrapper[4791]: I1007 00:21:32.204707 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.205232 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.205302 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.208480 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.209072 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.210925 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.210975 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.231174 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.231923 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: 
\"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.232212 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/c523f120-41c2-4ef3-a9f4-f4b42e971c5b-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"c523f120-41c2-4ef3-a9f4-f4b42e971c5b\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:32 crc kubenswrapper[4791]: I1007 00:21:32.382070 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:21:35 crc kubenswrapper[4791]: I1007 00:21:35.160214 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-8jhw8" event={"ID":"85eafc77-d412-45d0-98bc-412413cc803b","Type":"ContainerStarted","Data":"89cbcadb5b5f891f9f6107c72a4f4a50331bddcdf589476ca84e0a36923a4396"} Oct 07 00:21:35 crc kubenswrapper[4791]: I1007 00:21:35.171178 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 07 00:21:35 crc kubenswrapper[4791]: I1007 00:21:35.183802 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-8jhw8" podStartSLOduration=13.283304496 podStartE2EDuration="23.18378039s" podCreationTimestamp="2025-10-07 00:21:12 +0000 UTC" firstStartedPulling="2025-10-07 00:21:24.913477314 +0000 UTC m=+611.509414965" lastFinishedPulling="2025-10-07 00:21:34.813953208 +0000 UTC m=+621.409890859" observedRunningTime="2025-10-07 00:21:35.181037301 +0000 UTC m=+621.776974952" watchObservedRunningTime="2025-10-07 00:21:35.18378039 +0000 UTC m=+621.779718041" Oct 07 00:21:35 crc kubenswrapper[4791]: W1007 00:21:35.184466 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc523f120_41c2_4ef3_a9f4_f4b42e971c5b.slice/crio-8a5480d46396be721519eb108f31f882f2518db583a6eec771278c46ea463ac3 WatchSource:0}: Error finding container 8a5480d46396be721519eb108f31f882f2518db583a6eec771278c46ea463ac3: Status 404 returned error can't find the container with id 8a5480d46396be721519eb108f31f882f2518db583a6eec771278c46ea463ac3 Oct 07 00:21:36 crc kubenswrapper[4791]: I1007 00:21:36.170345 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c523f120-41c2-4ef3-a9f4-f4b42e971c5b","Type":"ContainerStarted","Data":"8a5480d46396be721519eb108f31f882f2518db583a6eec771278c46ea463ac3"} Oct 07 00:21:37 crc kubenswrapper[4791]: I1007 00:21:37.352485 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-q5556" Oct 07 00:21:41 crc kubenswrapper[4791]: I1007 00:21:41.213842 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" event={"ID":"f3a541f9-9f16-46d8-bf15-61223084be30","Type":"ContainerStarted","Data":"d91177bca03f89679be2d3b785b92cfd57db39d980292eb93b1dfe817f5c3f4f"} Oct 07 00:21:41 crc kubenswrapper[4791]: I1007 00:21:41.254111 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operators/obo-prometheus-operator-7c8cf85677-7n58g" podStartSLOduration=2.645045224 podStartE2EDuration="35.254088601s" podCreationTimestamp="2025-10-07 00:21:06 +0000 UTC" firstStartedPulling="2025-10-07 00:21:07.377509318 +0000 UTC m=+593.973446969" lastFinishedPulling="2025-10-07 00:21:39.986552695 +0000 UTC m=+626.582490346" observedRunningTime="2025-10-07 00:21:41.252430425 +0000 UTC m=+627.848368086" watchObservedRunningTime="2025-10-07 00:21:41.254088601 +0000 UTC m=+627.850026252" Oct 07 00:21:43 crc kubenswrapper[4791]: I1007 00:21:43.828304 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs"] Oct 07 00:21:43 crc kubenswrapper[4791]: I1007 00:21:43.829672 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs" Oct 07 00:21:43 crc kubenswrapper[4791]: I1007 00:21:43.833157 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Oct 07 00:21:43 crc kubenswrapper[4791]: I1007 00:21:43.834576 4791 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-t4d5d" Oct 07 00:21:43 crc kubenswrapper[4791]: I1007 00:21:43.834850 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Oct 07 00:21:43 crc kubenswrapper[4791]: I1007 00:21:43.861747 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs"] Oct 07 00:21:43 crc kubenswrapper[4791]: I1007 00:21:43.987771 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqhgn\" (UniqueName: \"kubernetes.io/projected/d705f9ed-45b2-4bbd-aa26-83958d2cb13f-kube-api-access-tqhgn\") pod \"cert-manager-operator-controller-manager-96d66748b-22qrs\" (UID: \"d705f9ed-45b2-4bbd-aa26-83958d2cb13f\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs" Oct 07 00:21:44 crc kubenswrapper[4791]: I1007 00:21:44.089485 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqhgn\" (UniqueName: \"kubernetes.io/projected/d705f9ed-45b2-4bbd-aa26-83958d2cb13f-kube-api-access-tqhgn\") pod \"cert-manager-operator-controller-manager-96d66748b-22qrs\" (UID: \"d705f9ed-45b2-4bbd-aa26-83958d2cb13f\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs" Oct 07 00:21:44 crc kubenswrapper[4791]: I1007 00:21:44.114704 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqhgn\" (UniqueName: \"kubernetes.io/projected/d705f9ed-45b2-4bbd-aa26-83958d2cb13f-kube-api-access-tqhgn\") pod \"cert-manager-operator-controller-manager-96d66748b-22qrs\" (UID: \"d705f9ed-45b2-4bbd-aa26-83958d2cb13f\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs" Oct 07 00:21:44 crc kubenswrapper[4791]: I1007 00:21:44.144814 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs" Oct 07 00:21:55 crc kubenswrapper[4791]: E1007 00:21:55.882032 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="registry.connect.redhat.com/elastic/elasticsearch:7.17.20" Oct 07 00:21:55 crc kubenswrapper[4791]: E1007 00:21:55.882865 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:elastic-internal-init-filesystem,Image:registry.connect.redhat.com/elastic/elasticsearch:7.17.20,Command:[bash -c /mnt/elastic-internal/scripts/prepare-fs.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:HEADLESS_SERVICE_NAME,Value:elasticsearch-es-default,ValueFrom:nil,},EnvVar{Name:PROBE_PASSWORD_PATH,Value:/mnt/elastic-internal/pod-mounted-users/elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:PROBE_USERNAME,Value:elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:READINESS_PROBE_PROTOCOL,Value:https,ValueFrom:nil,},EnvVar{Name:NSS_SDB_USE_CACHE,Value:no,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:downward-api,ReadOnly:true,MountPath:/mnt/elastic-internal/downward-api,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-bin-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-bin-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config,ReadOnly:true,MountPath:/mnt/elastic-internal/elasticsearch-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-config-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-plugins-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-plugins-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-http-certificates,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/http-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-probe-user,ReadOnly:true,MountPath:/mnt/elastic-internal/pod-mounted-users,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-remote-certificate-authorities,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/transport-remote-certs/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-scripts,ReadOnly:true,MountPath:/mnt/elastic-internal/scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-transport-certificates,ReadOnly:true,MountPath:/mnt/elastic-internal/transport-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-unicast-hosts,ReadOnly:true,MountPath:/mnt/elastic-internal/unicast-hosts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-xpack-file-realm,ReadOnly:true,MountPath:/mnt/elastic-internal/xpack-file-realm,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-data,ReadOnly:false,MountPath:/usr/share/elasticsearch/data,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-logs,ReadOnly:false,MountPath:/usr/share/elasticsearch/logs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tmp-volume,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod elasticsearch-es-default-0_service-telemetry(c523f120-41c2-4ef3-a9f4-f4b42e971c5b): ErrImagePull: rpc error: code = Canceled desc = copying config: context 
canceled" logger="UnhandledError" Oct 07 00:21:55 crc kubenswrapper[4791]: E1007 00:21:55.884395 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="c523f120-41c2-4ef3-a9f4-f4b42e971c5b" Oct 07 00:21:56 crc kubenswrapper[4791]: I1007 00:21:56.048169 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs"] Oct 07 00:21:56 crc kubenswrapper[4791]: W1007 00:21:56.057592 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd705f9ed_45b2_4bbd_aa26_83958d2cb13f.slice/crio-e6cf04ad03e8f9917121ecaeae7f964988b17d400c61fde0b376cfe7f3bd0d39 WatchSource:0}: Error finding container e6cf04ad03e8f9917121ecaeae7f964988b17d400c61fde0b376cfe7f3bd0d39: Status 404 returned error can't find the container with id e6cf04ad03e8f9917121ecaeae7f964988b17d400c61fde0b376cfe7f3bd0d39 Oct 07 00:21:56 crc kubenswrapper[4791]: I1007 00:21:56.319562 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs" event={"ID":"d705f9ed-45b2-4bbd-aa26-83958d2cb13f","Type":"ContainerStarted","Data":"e6cf04ad03e8f9917121ecaeae7f964988b17d400c61fde0b376cfe7f3bd0d39"} Oct 07 00:21:56 crc kubenswrapper[4791]: E1007 00:21:56.321711 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="c523f120-41c2-4ef3-a9f4-f4b42e971c5b" Oct 07 00:21:56 crc kubenswrapper[4791]: I1007 00:21:56.439875 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 07 00:21:56 crc kubenswrapper[4791]: I1007 00:21:56.476609 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 07 00:21:57 crc kubenswrapper[4791]: E1007 00:21:57.329363 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="c523f120-41c2-4ef3-a9f4-f4b42e971c5b" Oct 07 00:21:58 crc kubenswrapper[4791]: E1007 00:21:58.334813 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="c523f120-41c2-4ef3-a9f4-f4b42e971c5b" Oct 07 00:21:59 crc kubenswrapper[4791]: I1007 00:21:59.340695 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs" event={"ID":"d705f9ed-45b2-4bbd-aa26-83958d2cb13f","Type":"ContainerStarted","Data":"968122ca73467506c4e3b4524be6c8e6b0af149957dd89d04bf6047b47959d18"} Oct 07 00:21:59 crc kubenswrapper[4791]: I1007 00:21:59.363999 4791 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-22qrs" podStartSLOduration=13.685355539 podStartE2EDuration="16.363974559s" podCreationTimestamp="2025-10-07 00:21:43 +0000 UTC" firstStartedPulling="2025-10-07 00:21:56.061277128 +0000 UTC m=+642.657214779" lastFinishedPulling="2025-10-07 00:21:58.739896148 +0000 UTC m=+645.335833799" observedRunningTime="2025-10-07 00:21:59.358687353 +0000 UTC m=+645.954625024" watchObservedRunningTime="2025-10-07 00:21:59.363974559 +0000 UTC m=+645.959912220" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.476686 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-lrnqq"] Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.477708 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.479778 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.480712 4791 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-477j5" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.480799 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.493155 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-lrnqq"] Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.568213 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jj42c\" (UniqueName: \"kubernetes.io/projected/ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae-kube-api-access-jj42c\") pod \"cert-manager-webhook-d969966f-lrnqq\" (UID: \"ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae\") " pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.568268 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae-bound-sa-token\") pod \"cert-manager-webhook-d969966f-lrnqq\" (UID: \"ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae\") " pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.669337 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jj42c\" (UniqueName: \"kubernetes.io/projected/ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae-kube-api-access-jj42c\") pod \"cert-manager-webhook-d969966f-lrnqq\" (UID: \"ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae\") " pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.669415 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae-bound-sa-token\") pod \"cert-manager-webhook-d969966f-lrnqq\" (UID: \"ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae\") " pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.692311 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae-bound-sa-token\") pod 
\"cert-manager-webhook-d969966f-lrnqq\" (UID: \"ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae\") " pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.692548 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jj42c\" (UniqueName: \"kubernetes.io/projected/ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae-kube-api-access-jj42c\") pod \"cert-manager-webhook-d969966f-lrnqq\" (UID: \"ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae\") " pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:02 crc kubenswrapper[4791]: I1007 00:22:02.791995 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:03 crc kubenswrapper[4791]: I1007 00:22:03.281667 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-lrnqq"] Oct 07 00:22:03 crc kubenswrapper[4791]: W1007 00:22:03.286015 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba95eb3a_f7cd_4e9d_be3c_4e1dd78d8fae.slice/crio-e4a142b2f8bf954b255668d474d5867f7dc5e1deaafb70514ff5cf9870563354 WatchSource:0}: Error finding container e4a142b2f8bf954b255668d474d5867f7dc5e1deaafb70514ff5cf9870563354: Status 404 returned error can't find the container with id e4a142b2f8bf954b255668d474d5867f7dc5e1deaafb70514ff5cf9870563354 Oct 07 00:22:03 crc kubenswrapper[4791]: I1007 00:22:03.366106 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" event={"ID":"ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae","Type":"ContainerStarted","Data":"e4a142b2f8bf954b255668d474d5867f7dc5e1deaafb70514ff5cf9870563354"} Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.699697 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-x797z"] Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.701012 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.704770 4791 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-tnnmn" Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.720273 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-x797z"] Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.801562 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l2j4\" (UniqueName: \"kubernetes.io/projected/01fdf850-6e4b-46f1-9a87-651a40d459fc-kube-api-access-5l2j4\") pod \"cert-manager-cainjector-7d9f95dbf-x797z\" (UID: \"01fdf850-6e4b-46f1-9a87-651a40d459fc\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.801652 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/01fdf850-6e4b-46f1-9a87-651a40d459fc-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-x797z\" (UID: \"01fdf850-6e4b-46f1-9a87-651a40d459fc\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.903519 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l2j4\" (UniqueName: \"kubernetes.io/projected/01fdf850-6e4b-46f1-9a87-651a40d459fc-kube-api-access-5l2j4\") pod \"cert-manager-cainjector-7d9f95dbf-x797z\" (UID: \"01fdf850-6e4b-46f1-9a87-651a40d459fc\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.903626 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/01fdf850-6e4b-46f1-9a87-651a40d459fc-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-x797z\" (UID: \"01fdf850-6e4b-46f1-9a87-651a40d459fc\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.927542 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/01fdf850-6e4b-46f1-9a87-651a40d459fc-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-x797z\" (UID: \"01fdf850-6e4b-46f1-9a87-651a40d459fc\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" Oct 07 00:22:04 crc kubenswrapper[4791]: I1007 00:22:04.927780 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l2j4\" (UniqueName: \"kubernetes.io/projected/01fdf850-6e4b-46f1-9a87-651a40d459fc-kube-api-access-5l2j4\") pod \"cert-manager-cainjector-7d9f95dbf-x797z\" (UID: \"01fdf850-6e4b-46f1-9a87-651a40d459fc\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" Oct 07 00:22:05 crc kubenswrapper[4791]: I1007 00:22:05.069625 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" Oct 07 00:22:05 crc kubenswrapper[4791]: I1007 00:22:05.500776 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-x797z"] Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.401859 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" event={"ID":"01fdf850-6e4b-46f1-9a87-651a40d459fc","Type":"ContainerStarted","Data":"3e0928abac5c49f8896141faa7fe22d287797a4d168d250182bca0e9726af008"} Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.834933 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.836040 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.838357 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-global-ca" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.838652 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-sys-config" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.838748 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-ca" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.839016 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.860961 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.950819 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.950870 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.950937 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.950975 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: 
\"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.951005 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.951022 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.951042 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spbdv\" (UniqueName: \"kubernetes.io/projected/b5a39c39-134f-4480-831a-19deca8e4b21-kube-api-access-spbdv\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.951070 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.951084 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.951110 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.951132 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:06 crc kubenswrapper[4791]: I1007 00:22:06.951155 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-1-build\" (UID: 
\"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053127 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053223 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053255 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053290 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053320 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053350 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053376 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053377 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053426 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-spbdv\" (UniqueName: \"kubernetes.io/projected/b5a39c39-134f-4480-831a-19deca8e4b21-kube-api-access-spbdv\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053528 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053547 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053633 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053679 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053739 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.053962 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.054011 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.054273 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: 
\"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.054280 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.054438 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.054720 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.059271 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.061032 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.065199 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.079150 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spbdv\" (UniqueName: \"kubernetes.io/projected/b5a39c39-134f-4480-831a-19deca8e4b21-kube-api-access-spbdv\") pod \"service-telemetry-operator-1-build\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:07 crc kubenswrapper[4791]: I1007 00:22:07.199747 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:08 crc kubenswrapper[4791]: I1007 00:22:08.145688 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 07 00:22:08 crc kubenswrapper[4791]: I1007 00:22:08.418039 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"b5a39c39-134f-4480-831a-19deca8e4b21","Type":"ContainerStarted","Data":"fd0b7c26354e19c8c2b6326b883420bbebbcd43acab2c6d63abf25a648eb7c3a"} Oct 07 00:22:08 crc kubenswrapper[4791]: I1007 00:22:08.420207 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" event={"ID":"01fdf850-6e4b-46f1-9a87-651a40d459fc","Type":"ContainerStarted","Data":"1312db0b0cdfc6a7c8f62b5e52ee93e5b52a700c30f2f39d4261a608dff78ff8"} Oct 07 00:22:08 crc kubenswrapper[4791]: I1007 00:22:08.423249 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" event={"ID":"ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae","Type":"ContainerStarted","Data":"f695bd4c3683b4b85e16bbc4c1650eb043a2dd1282cfa1404b526e322a06dfdf"} Oct 07 00:22:08 crc kubenswrapper[4791]: I1007 00:22:08.423456 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:08 crc kubenswrapper[4791]: I1007 00:22:08.470889 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" podStartSLOduration=1.777768139 podStartE2EDuration="6.470866782s" podCreationTimestamp="2025-10-07 00:22:02 +0000 UTC" firstStartedPulling="2025-10-07 00:22:03.288559213 +0000 UTC m=+649.884496864" lastFinishedPulling="2025-10-07 00:22:07.981657856 +0000 UTC m=+654.577595507" observedRunningTime="2025-10-07 00:22:08.467523744 +0000 UTC m=+655.063461395" watchObservedRunningTime="2025-10-07 00:22:08.470866782 +0000 UTC m=+655.066804433" Oct 07 00:22:08 crc kubenswrapper[4791]: I1007 00:22:08.473665 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-x797z" podStartSLOduration=2.007116215 podStartE2EDuration="4.473658104s" podCreationTimestamp="2025-10-07 00:22:04 +0000 UTC" firstStartedPulling="2025-10-07 00:22:05.515274261 +0000 UTC m=+652.111211912" lastFinishedPulling="2025-10-07 00:22:07.98181614 +0000 UTC m=+654.577753801" observedRunningTime="2025-10-07 00:22:08.45337472 +0000 UTC m=+655.049312391" watchObservedRunningTime="2025-10-07 00:22:08.473658104 +0000 UTC m=+655.069595755" Oct 07 00:22:15 crc kubenswrapper[4791]: I1007 00:22:15.506198 4791 generic.go:334] "Generic (PLEG): container finished" podID="b5a39c39-134f-4480-831a-19deca8e4b21" containerID="138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52" exitCode=0 Oct 07 00:22:15 crc kubenswrapper[4791]: I1007 00:22:15.506308 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"b5a39c39-134f-4480-831a-19deca8e4b21","Type":"ContainerDied","Data":"138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52"} Oct 07 00:22:15 crc kubenswrapper[4791]: I1007 00:22:15.509842 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" 
event={"ID":"c523f120-41c2-4ef3-a9f4-f4b42e971c5b","Type":"ContainerStarted","Data":"3bd1201af751352bca3d7b92eee7ce2e40a4b2a7e0c3262355c7aefa317bb3d0"} Oct 07 00:22:16 crc kubenswrapper[4791]: I1007 00:22:16.526644 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"b5a39c39-134f-4480-831a-19deca8e4b21","Type":"ContainerStarted","Data":"28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3"} Oct 07 00:22:16 crc kubenswrapper[4791]: I1007 00:22:16.580380 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-1-build" podStartSLOduration=5.189634859 podStartE2EDuration="10.580344871s" podCreationTimestamp="2025-10-07 00:22:06 +0000 UTC" firstStartedPulling="2025-10-07 00:22:08.176346437 +0000 UTC m=+654.772284088" lastFinishedPulling="2025-10-07 00:22:13.567056449 +0000 UTC m=+660.162994100" observedRunningTime="2025-10-07 00:22:16.568626898 +0000 UTC m=+663.164564559" watchObservedRunningTime="2025-10-07 00:22:16.580344871 +0000 UTC m=+663.176282562" Oct 07 00:22:17 crc kubenswrapper[4791]: I1007 00:22:17.274498 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 07 00:22:17 crc kubenswrapper[4791]: I1007 00:22:17.537434 4791 generic.go:334] "Generic (PLEG): container finished" podID="c523f120-41c2-4ef3-a9f4-f4b42e971c5b" containerID="3bd1201af751352bca3d7b92eee7ce2e40a4b2a7e0c3262355c7aefa317bb3d0" exitCode=0 Oct 07 00:22:17 crc kubenswrapper[4791]: I1007 00:22:17.537564 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c523f120-41c2-4ef3-a9f4-f4b42e971c5b","Type":"ContainerDied","Data":"3bd1201af751352bca3d7b92eee7ce2e40a4b2a7e0c3262355c7aefa317bb3d0"} Oct 07 00:22:17 crc kubenswrapper[4791]: I1007 00:22:17.795173 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-lrnqq" Oct 07 00:22:18 crc kubenswrapper[4791]: I1007 00:22:18.545766 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-1-build" podUID="b5a39c39-134f-4480-831a-19deca8e4b21" containerName="docker-build" containerID="cri-o://28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3" gracePeriod=30 Oct 07 00:22:18 crc kubenswrapper[4791]: I1007 00:22:18.982503 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Oct 07 00:22:18 crc kubenswrapper[4791]: I1007 00:22:18.984350 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:18 crc kubenswrapper[4791]: I1007 00:22:18.986995 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-ca" Oct 07 00:22:18 crc kubenswrapper[4791]: I1007 00:22:18.987500 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-sys-config" Oct 07 00:22:18 crc kubenswrapper[4791]: I1007 00:22:18.999335 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:18 crc kubenswrapper[4791]: I1007 00:22:18.999505 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:18 crc kubenswrapper[4791]: I1007 00:22:18.999616 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:18.999745 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:18.999844 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q4zr\" (UniqueName: \"kubernetes.io/projected/13647145-7356-4c86-9cca-2f03087ca908-kube-api-access-7q4zr\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:18.999873 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:18.999914 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc 
kubenswrapper[4791]: I1007 00:22:19.000019 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.000059 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.000082 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.000104 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.000163 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.001808 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-global-ca" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.009349 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.101810 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.103566 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.103697 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q4zr\" 
(UniqueName: \"kubernetes.io/projected/13647145-7356-4c86-9cca-2f03087ca908-kube-api-access-7q4zr\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.103726 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.103789 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.103814 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.103997 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104118 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104170 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104204 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104290 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: 
\"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104388 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104517 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104742 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104755 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104781 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104122 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.104975 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.105318 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.105584 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.105743 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.113837 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.114929 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.131360 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q4zr\" (UniqueName: \"kubernetes.io/projected/13647145-7356-4c86-9cca-2f03087ca908-kube-api-access-7q4zr\") pod \"service-telemetry-operator-2-build\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:19 crc kubenswrapper[4791]: I1007 00:22:19.307256 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:22:20 crc kubenswrapper[4791]: I1007 00:22:20.281550 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Oct 07 00:22:20 crc kubenswrapper[4791]: W1007 00:22:20.294234 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13647145_7356_4c86_9cca_2f03087ca908.slice/crio-3f6542f963900f9bec79e2028f8d40d211a1b4a715e55e2e853be1654f5b8f9b WatchSource:0}: Error finding container 3f6542f963900f9bec79e2028f8d40d211a1b4a715e55e2e853be1654f5b8f9b: Status 404 returned error can't find the container with id 3f6542f963900f9bec79e2028f8d40d211a1b4a715e55e2e853be1654f5b8f9b Oct 07 00:22:20 crc kubenswrapper[4791]: I1007 00:22:20.559538 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"13647145-7356-4c86-9cca-2f03087ca908","Type":"ContainerStarted","Data":"3f6542f963900f9bec79e2028f8d40d211a1b4a715e55e2e853be1654f5b8f9b"} Oct 07 00:22:20 crc kubenswrapper[4791]: I1007 00:22:20.850016 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-7mggb"] Oct 07 00:22:20 crc kubenswrapper[4791]: I1007 00:22:20.854043 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" Oct 07 00:22:20 crc kubenswrapper[4791]: I1007 00:22:20.863503 4791 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-jsvl4" Oct 07 00:22:20 crc kubenswrapper[4791]: I1007 00:22:20.871322 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-7mggb"] Oct 07 00:22:20 crc kubenswrapper[4791]: I1007 00:22:20.938361 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-7mggb\" (UID: \"a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce\") " pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" Oct 07 00:22:20 crc kubenswrapper[4791]: I1007 00:22:20.938498 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krhw2\" (UniqueName: \"kubernetes.io/projected/a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce-kube-api-access-krhw2\") pod \"cert-manager-7d4cc89fcb-7mggb\" (UID: \"a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce\") " pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.039735 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-7mggb\" (UID: \"a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce\") " pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.039790 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krhw2\" (UniqueName: \"kubernetes.io/projected/a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce-kube-api-access-krhw2\") pod \"cert-manager-7d4cc89fcb-7mggb\" (UID: \"a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce\") " pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.068262 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krhw2\" (UniqueName: \"kubernetes.io/projected/a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce-kube-api-access-krhw2\") pod \"cert-manager-7d4cc89fcb-7mggb\" (UID: \"a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce\") " pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.070761 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-7mggb\" (UID: \"a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce\") " pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.174789 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_b5a39c39-134f-4480-831a-19deca8e4b21/docker-build/0.log" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.175377 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.241901 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-buildworkdir\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.241963 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-buildcachedir\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242005 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-push\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242064 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-node-pullsecrets\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242144 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-build-blob-cache\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242151 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242210 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-pull\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242259 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242265 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-ca-bundles\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242378 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spbdv\" (UniqueName: \"kubernetes.io/projected/b5a39c39-134f-4480-831a-19deca8e4b21-kube-api-access-spbdv\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242539 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-root\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242630 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-proxy-ca-bundles\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242670 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-run\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.242726 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-system-configs\") pod \"b5a39c39-134f-4480-831a-19deca8e4b21\" (UID: \"b5a39c39-134f-4480-831a-19deca8e4b21\") " Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.243030 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.243276 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244034 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244426 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244517 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244812 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244848 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244866 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244882 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244897 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244909 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5a39c39-134f-4480-831a-19deca8e4b21-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244922 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244940 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.244990 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.246324 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.249086 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.249612 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5a39c39-134f-4480-831a-19deca8e4b21-kube-api-access-spbdv" (OuterVolumeSpecName: "kube-api-access-spbdv") pod "b5a39c39-134f-4480-831a-19deca8e4b21" (UID: "b5a39c39-134f-4480-831a-19deca8e4b21"). InnerVolumeSpecName "kube-api-access-spbdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.336214 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.346922 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.346978 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spbdv\" (UniqueName: \"kubernetes.io/projected/b5a39c39-134f-4480-831a-19deca8e4b21-kube-api-access-spbdv\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.347000 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b5a39c39-134f-4480-831a-19deca8e4b21-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.347022 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b5a39c39-134f-4480-831a-19deca8e4b21-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.347040 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b5a39c39-134f-4480-831a-19deca8e4b21-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.573163 4791 generic.go:334] "Generic (PLEG): container finished" podID="c523f120-41c2-4ef3-a9f4-f4b42e971c5b" containerID="81273c3d7ece36c6cb65d421e6391c18f26f712d5b7f606807fc94d30d1df20b" exitCode=0 Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.573238 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c523f120-41c2-4ef3-a9f4-f4b42e971c5b","Type":"ContainerDied","Data":"81273c3d7ece36c6cb65d421e6391c18f26f712d5b7f606807fc94d30d1df20b"} Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.577587 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"13647145-7356-4c86-9cca-2f03087ca908","Type":"ContainerStarted","Data":"884f620ae99c068129112134756b0b5082af385e1978c79656de73b66ed0006c"} Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.584442 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_b5a39c39-134f-4480-831a-19deca8e4b21/docker-build/0.log" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.585348 4791 generic.go:334] "Generic (PLEG): container finished" podID="b5a39c39-134f-4480-831a-19deca8e4b21" containerID="28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3" exitCode=1 Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.585497 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.585448 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"b5a39c39-134f-4480-831a-19deca8e4b21","Type":"ContainerDied","Data":"28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3"} Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.585673 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"b5a39c39-134f-4480-831a-19deca8e4b21","Type":"ContainerDied","Data":"fd0b7c26354e19c8c2b6326b883420bbebbcd43acab2c6d63abf25a648eb7c3a"} Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.585735 4791 scope.go:117] "RemoveContainer" containerID="28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.635957 4791 scope.go:117] "RemoveContainer" containerID="138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.642393 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-7mggb"] Oct 07 00:22:21 crc kubenswrapper[4791]: W1007 00:22:21.648880 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda48eeb9a_a63d_45e3_b3ae_dae3aeedb6ce.slice/crio-a42d0fd0ce270fe3d7b6b6646415faa1ee1b6378403bddab41400b01a3b5d1fe WatchSource:0}: Error finding container a42d0fd0ce270fe3d7b6b6646415faa1ee1b6378403bddab41400b01a3b5d1fe: Status 404 returned error can't find the container with id a42d0fd0ce270fe3d7b6b6646415faa1ee1b6378403bddab41400b01a3b5d1fe Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.687490 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.692140 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.716248 4791 scope.go:117] "RemoveContainer" containerID="28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3" Oct 07 00:22:21 crc kubenswrapper[4791]: E1007 00:22:21.716834 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3\": container with ID starting with 28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3 not found: ID does not exist" containerID="28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.716910 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3"} err="failed to get container status \"28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3\": rpc error: code = NotFound desc = could not find container \"28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3\": container with ID starting with 28bcb49e84948b64d092ce70869dee4a55625f698143540da9620cfb9dbe28a3 not found: ID does not exist" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.716950 4791 scope.go:117] "RemoveContainer" 
containerID="138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52" Oct 07 00:22:21 crc kubenswrapper[4791]: E1007 00:22:21.717320 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52\": container with ID starting with 138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52 not found: ID does not exist" containerID="138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52" Oct 07 00:22:21 crc kubenswrapper[4791]: I1007 00:22:21.718491 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52"} err="failed to get container status \"138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52\": rpc error: code = NotFound desc = could not find container \"138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52\": container with ID starting with 138d38f14ceb0d3f3587fe4db6bdaff78533175b824ba4bc01a4738bf12cce52 not found: ID does not exist" Oct 07 00:22:22 crc kubenswrapper[4791]: I1007 00:22:22.078947 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5a39c39-134f-4480-831a-19deca8e4b21" path="/var/lib/kubelet/pods/b5a39c39-134f-4480-831a-19deca8e4b21/volumes" Oct 07 00:22:22 crc kubenswrapper[4791]: I1007 00:22:22.597015 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" event={"ID":"a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce","Type":"ContainerStarted","Data":"2b687e96526d3c0ce51182b36474e6c885198cdbd4889e309354183f266e115e"} Oct 07 00:22:22 crc kubenswrapper[4791]: I1007 00:22:22.599323 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" event={"ID":"a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce","Type":"ContainerStarted","Data":"a42d0fd0ce270fe3d7b6b6646415faa1ee1b6378403bddab41400b01a3b5d1fe"} Oct 07 00:22:22 crc kubenswrapper[4791]: I1007 00:22:22.599464 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c523f120-41c2-4ef3-a9f4-f4b42e971c5b","Type":"ContainerStarted","Data":"4590e290380d0056d3c48634a9776e4480e8ebab35489ef15dd137596d50e6ba"} Oct 07 00:22:22 crc kubenswrapper[4791]: I1007 00:22:22.599946 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:22:22 crc kubenswrapper[4791]: I1007 00:22:22.627842 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-7mggb" podStartSLOduration=2.627795774 podStartE2EDuration="2.627795774s" podCreationTimestamp="2025-10-07 00:22:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:22:22.621933112 +0000 UTC m=+669.217870763" watchObservedRunningTime="2025-10-07 00:22:22.627795774 +0000 UTC m=+669.223733425" Oct 07 00:22:22 crc kubenswrapper[4791]: I1007 00:22:22.666554 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=12.307627712 podStartE2EDuration="50.666516888s" podCreationTimestamp="2025-10-07 00:21:32 +0000 UTC" firstStartedPulling="2025-10-07 00:21:35.186643102 +0000 UTC m=+621.782580753" lastFinishedPulling="2025-10-07 00:22:13.545532278 +0000 UTC 
m=+660.141469929" observedRunningTime="2025-10-07 00:22:22.661336676 +0000 UTC m=+669.257274367" watchObservedRunningTime="2025-10-07 00:22:22.666516888 +0000 UTC m=+669.262454549" Oct 07 00:22:32 crc kubenswrapper[4791]: I1007 00:22:32.499152 4791 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="c523f120-41c2-4ef3-a9f4-f4b42e971c5b" containerName="elasticsearch" probeResult="failure" output=< Oct 07 00:22:32 crc kubenswrapper[4791]: {"timestamp": "2025-10-07T00:22:32+00:00", "message": "readiness probe failed", "curl_rc": "7"} Oct 07 00:22:32 crc kubenswrapper[4791]: > Oct 07 00:22:32 crc kubenswrapper[4791]: I1007 00:22:32.693617 4791 generic.go:334] "Generic (PLEG): container finished" podID="13647145-7356-4c86-9cca-2f03087ca908" containerID="884f620ae99c068129112134756b0b5082af385e1978c79656de73b66ed0006c" exitCode=0 Oct 07 00:22:32 crc kubenswrapper[4791]: I1007 00:22:32.693665 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"13647145-7356-4c86-9cca-2f03087ca908","Type":"ContainerDied","Data":"884f620ae99c068129112134756b0b5082af385e1978c79656de73b66ed0006c"} Oct 07 00:22:33 crc kubenswrapper[4791]: I1007 00:22:33.702868 4791 generic.go:334] "Generic (PLEG): container finished" podID="13647145-7356-4c86-9cca-2f03087ca908" containerID="5d8599a8faa3af26f360b91657921cf18fb37140cb99b991016c11eaeb36cd20" exitCode=0 Oct 07 00:22:33 crc kubenswrapper[4791]: I1007 00:22:33.702916 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"13647145-7356-4c86-9cca-2f03087ca908","Type":"ContainerDied","Data":"5d8599a8faa3af26f360b91657921cf18fb37140cb99b991016c11eaeb36cd20"} Oct 07 00:22:33 crc kubenswrapper[4791]: I1007 00:22:33.770084 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-2-build_13647145-7356-4c86-9cca-2f03087ca908/manage-dockerfile/0.log" Oct 07 00:22:34 crc kubenswrapper[4791]: I1007 00:22:34.724621 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"13647145-7356-4c86-9cca-2f03087ca908","Type":"ContainerStarted","Data":"1294028d319964796a817a86980e950364091bf6f87b778d049afc8d2108457a"} Oct 07 00:22:34 crc kubenswrapper[4791]: I1007 00:22:34.766300 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-2-build" podStartSLOduration=16.766274138 podStartE2EDuration="16.766274138s" podCreationTimestamp="2025-10-07 00:22:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:22:34.76393138 +0000 UTC m=+681.359869071" watchObservedRunningTime="2025-10-07 00:22:34.766274138 +0000 UTC m=+681.362211789" Oct 07 00:22:38 crc kubenswrapper[4791]: I1007 00:22:38.199262 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Oct 07 00:22:41 crc kubenswrapper[4791]: I1007 00:22:41.600558 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:22:41 crc kubenswrapper[4791]: I1007 
00:22:41.600988 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.440798 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v8f7d"] Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.441990 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" podUID="88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" containerName="controller-manager" containerID="cri-o://92cdde62e67b9a895a7093cf8b93d48dcdac34d8cc7e40f15e81b2982ba493cd" gracePeriod=30 Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.542303 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b"] Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.542965 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" podUID="9d0a85fd-7c96-4655-b36b-4b2e92506513" containerName="route-controller-manager" containerID="cri-o://6dce6e7cd8393ee1cb2e486cb375928c06ffb87d9a7929841d952bae1e6a7e69" gracePeriod=30 Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.849304 4791 generic.go:334] "Generic (PLEG): container finished" podID="88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" containerID="92cdde62e67b9a895a7093cf8b93d48dcdac34d8cc7e40f15e81b2982ba493cd" exitCode=0 Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.849428 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" event={"ID":"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c","Type":"ContainerDied","Data":"92cdde62e67b9a895a7093cf8b93d48dcdac34d8cc7e40f15e81b2982ba493cd"} Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.852390 4791 generic.go:334] "Generic (PLEG): container finished" podID="9d0a85fd-7c96-4655-b36b-4b2e92506513" containerID="6dce6e7cd8393ee1cb2e486cb375928c06ffb87d9a7929841d952bae1e6a7e69" exitCode=0 Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.852476 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" event={"ID":"9d0a85fd-7c96-4655-b36b-4b2e92506513","Type":"ContainerDied","Data":"6dce6e7cd8393ee1cb2e486cb375928c06ffb87d9a7929841d952bae1e6a7e69"} Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.914716 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:22:52 crc kubenswrapper[4791]: I1007 00:22:52.979242 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.040888 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-proxy-ca-bundles\") pod \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.040939 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pz94\" (UniqueName: \"kubernetes.io/projected/9d0a85fd-7c96-4655-b36b-4b2e92506513-kube-api-access-4pz94\") pod \"9d0a85fd-7c96-4655-b36b-4b2e92506513\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.040968 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-config\") pod \"9d0a85fd-7c96-4655-b36b-4b2e92506513\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.041056 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwrdf\" (UniqueName: \"kubernetes.io/projected/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-kube-api-access-hwrdf\") pod \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.041081 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d0a85fd-7c96-4655-b36b-4b2e92506513-serving-cert\") pod \"9d0a85fd-7c96-4655-b36b-4b2e92506513\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.041122 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-client-ca\") pod \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.041162 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-client-ca\") pod \"9d0a85fd-7c96-4655-b36b-4b2e92506513\" (UID: \"9d0a85fd-7c96-4655-b36b-4b2e92506513\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.041181 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-serving-cert\") pod \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.041238 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-config\") pod \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\" (UID: \"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c\") " Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.042197 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-client-ca" (OuterVolumeSpecName: "client-ca") pod "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" 
(UID: "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.042265 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" (UID: "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.042318 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-config" (OuterVolumeSpecName: "config") pod "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" (UID: "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.042526 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-client-ca" (OuterVolumeSpecName: "client-ca") pod "9d0a85fd-7c96-4655-b36b-4b2e92506513" (UID: "9d0a85fd-7c96-4655-b36b-4b2e92506513"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.042814 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-config" (OuterVolumeSpecName: "config") pod "9d0a85fd-7c96-4655-b36b-4b2e92506513" (UID: "9d0a85fd-7c96-4655-b36b-4b2e92506513"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.050132 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d0a85fd-7c96-4655-b36b-4b2e92506513-kube-api-access-4pz94" (OuterVolumeSpecName: "kube-api-access-4pz94") pod "9d0a85fd-7c96-4655-b36b-4b2e92506513" (UID: "9d0a85fd-7c96-4655-b36b-4b2e92506513"). InnerVolumeSpecName "kube-api-access-4pz94". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.050159 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d0a85fd-7c96-4655-b36b-4b2e92506513-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d0a85fd-7c96-4655-b36b-4b2e92506513" (UID: "9d0a85fd-7c96-4655-b36b-4b2e92506513"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.050191 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-kube-api-access-hwrdf" (OuterVolumeSpecName: "kube-api-access-hwrdf") pod "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" (UID: "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c"). InnerVolumeSpecName "kube-api-access-hwrdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.052948 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" (UID: "88d9e4e8-2e0f-4ede-8b83-94f75153ae8c"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.142895 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.143230 4791 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.143373 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pz94\" (UniqueName: \"kubernetes.io/projected/9d0a85fd-7c96-4655-b36b-4b2e92506513-kube-api-access-4pz94\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.143902 4791 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.143990 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwrdf\" (UniqueName: \"kubernetes.io/projected/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-kube-api-access-hwrdf\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.144064 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d0a85fd-7c96-4655-b36b-4b2e92506513-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.144147 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.144221 4791 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.144661 4791 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d0a85fd-7c96-4655-b36b-4b2e92506513-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.722690 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-db5d95fcd-lz7zq"] Oct 07 00:22:53 crc kubenswrapper[4791]: E1007 00:22:53.723111 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" containerName="controller-manager" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.723135 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" containerName="controller-manager" Oct 07 00:22:53 crc kubenswrapper[4791]: E1007 00:22:53.723163 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5a39c39-134f-4480-831a-19deca8e4b21" containerName="docker-build" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.723171 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5a39c39-134f-4480-831a-19deca8e4b21" containerName="docker-build" Oct 07 00:22:53 crc kubenswrapper[4791]: E1007 00:22:53.723187 4791 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="b5a39c39-134f-4480-831a-19deca8e4b21" containerName="manage-dockerfile" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.723196 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5a39c39-134f-4480-831a-19deca8e4b21" containerName="manage-dockerfile" Oct 07 00:22:53 crc kubenswrapper[4791]: E1007 00:22:53.723205 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d0a85fd-7c96-4655-b36b-4b2e92506513" containerName="route-controller-manager" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.723213 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d0a85fd-7c96-4655-b36b-4b2e92506513" containerName="route-controller-manager" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.723350 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d0a85fd-7c96-4655-b36b-4b2e92506513" containerName="route-controller-manager" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.723380 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5a39c39-134f-4480-831a-19deca8e4b21" containerName="docker-build" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.723391 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" containerName="controller-manager" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.724360 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.726906 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc"] Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.728037 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.745973 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-db5d95fcd-lz7zq"] Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.749637 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc"] Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.753786 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5spg\" (UniqueName: \"kubernetes.io/projected/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-kube-api-access-r5spg\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.753825 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-serving-cert\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.753868 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-client-ca\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.753890 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-config\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.753910 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-proxy-ca-bundles\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855610 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-config\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855683 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp8v5\" (UniqueName: \"kubernetes.io/projected/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-kube-api-access-rp8v5\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " 
pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855745 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5spg\" (UniqueName: \"kubernetes.io/projected/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-kube-api-access-r5spg\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855780 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-serving-cert\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855806 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-client-ca\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855851 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-client-ca\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855874 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-serving-cert\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855897 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-config\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.855923 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-proxy-ca-bundles\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.857377 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-client-ca\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.858029 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-config\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.858198 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-proxy-ca-bundles\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.861590 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" event={"ID":"9d0a85fd-7c96-4655-b36b-4b2e92506513","Type":"ContainerDied","Data":"f7caa57e113ee48027a9800c6e31ea37b14209a9dd699e407ea305e6388081ea"} Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.861839 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.862061 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-serving-cert\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.863423 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" event={"ID":"88d9e4e8-2e0f-4ede-8b83-94f75153ae8c","Type":"ContainerDied","Data":"d35222976507e135690133a2c6e12b80a5817a7865abdce64d2056e543e307c0"} Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.863490 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v8f7d" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.862283 4791 scope.go:117] "RemoveContainer" containerID="6dce6e7cd8393ee1cb2e486cb375928c06ffb87d9a7929841d952bae1e6a7e69" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.881275 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5spg\" (UniqueName: \"kubernetes.io/projected/8895bc5b-2393-41fa-b33f-d1b75c65b3a1-kube-api-access-r5spg\") pod \"controller-manager-db5d95fcd-lz7zq\" (UID: \"8895bc5b-2393-41fa-b33f-d1b75c65b3a1\") " pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.933351 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v8f7d"] Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.933524 4791 scope.go:117] "RemoveContainer" containerID="92cdde62e67b9a895a7093cf8b93d48dcdac34d8cc7e40f15e81b2982ba493cd" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.939782 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v8f7d"] Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.948667 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b"] Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.953354 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sw84b"] Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.957230 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-client-ca\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.957301 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-serving-cert\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.957340 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-config\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.957374 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp8v5\" (UniqueName: \"kubernetes.io/projected/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-kube-api-access-rp8v5\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.958813 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-client-ca\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.963663 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-config\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.971979 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-serving-cert\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:53 crc kubenswrapper[4791]: I1007 00:22:53.999670 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp8v5\" (UniqueName: \"kubernetes.io/projected/2320763a-dd3a-4257-9fa0-5f92ed0dfdd5-kube-api-access-rp8v5\") pod \"route-controller-manager-596bd95445-xrtbc\" (UID: \"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5\") " pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.054664 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.068741 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.076072 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88d9e4e8-2e0f-4ede-8b83-94f75153ae8c" path="/var/lib/kubelet/pods/88d9e4e8-2e0f-4ede-8b83-94f75153ae8c/volumes" Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.076608 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d0a85fd-7c96-4655-b36b-4b2e92506513" path="/var/lib/kubelet/pods/9d0a85fd-7c96-4655-b36b-4b2e92506513/volumes" Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.461728 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-db5d95fcd-lz7zq"] Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.511827 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc"] Oct 07 00:22:54 crc kubenswrapper[4791]: W1007 00:22:54.541181 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2320763a_dd3a_4257_9fa0_5f92ed0dfdd5.slice/crio-2f24010d4449707447f1415e6337b975799a2464a611aeaa8ca9ba790619d6fb WatchSource:0}: Error finding container 2f24010d4449707447f1415e6337b975799a2464a611aeaa8ca9ba790619d6fb: Status 404 returned error can't find the container with id 2f24010d4449707447f1415e6337b975799a2464a611aeaa8ca9ba790619d6fb Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.874318 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" event={"ID":"8895bc5b-2393-41fa-b33f-d1b75c65b3a1","Type":"ContainerStarted","Data":"b0903061cf6dfbc2586973fbbbe27d8891c4c354024f965eb39ebac7fdd88e1e"} Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.874374 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" event={"ID":"8895bc5b-2393-41fa-b33f-d1b75c65b3a1","Type":"ContainerStarted","Data":"15b3537e5686f13e2c1f018b33be56f574dd238042f62570dd38684acfa94d21"} Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.874576 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.876932 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" event={"ID":"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5","Type":"ContainerStarted","Data":"065d892c91e3735d23c27c509614e11d7824e37f7fdcb5532c1a03634e2361ff"} Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.876986 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" event={"ID":"2320763a-dd3a-4257-9fa0-5f92ed0dfdd5","Type":"ContainerStarted","Data":"2f24010d4449707447f1415e6337b975799a2464a611aeaa8ca9ba790619d6fb"} Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.877150 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.882241 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" Oct 07 
00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.898000 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-db5d95fcd-lz7zq" podStartSLOduration=2.897978388 podStartE2EDuration="2.897978388s" podCreationTimestamp="2025-10-07 00:22:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:22:54.897149213 +0000 UTC m=+701.493086864" watchObservedRunningTime="2025-10-07 00:22:54.897978388 +0000 UTC m=+701.493916039" Oct 07 00:22:54 crc kubenswrapper[4791]: I1007 00:22:54.935028 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" podStartSLOduration=2.935008832 podStartE2EDuration="2.935008832s" podCreationTimestamp="2025-10-07 00:22:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:22:54.929324595 +0000 UTC m=+701.525262246" watchObservedRunningTime="2025-10-07 00:22:54.935008832 +0000 UTC m=+701.530946483" Oct 07 00:22:55 crc kubenswrapper[4791]: I1007 00:22:55.254295 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-596bd95445-xrtbc" Oct 07 00:23:01 crc kubenswrapper[4791]: I1007 00:23:01.236593 4791 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 07 00:23:11 crc kubenswrapper[4791]: I1007 00:23:11.600813 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:23:11 crc kubenswrapper[4791]: I1007 00:23:11.601531 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:23:24 crc kubenswrapper[4791]: I1007 00:23:24.989849 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xcjxg"] Oct 07 00:23:24 crc kubenswrapper[4791]: I1007 00:23:24.991415 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.010265 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xcjxg"] Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.148369 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnm5t\" (UniqueName: \"kubernetes.io/projected/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-kube-api-access-cnm5t\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.148509 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-utilities\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.148545 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-catalog-content\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.249743 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-utilities\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.249814 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-catalog-content\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.249863 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnm5t\" (UniqueName: \"kubernetes.io/projected/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-kube-api-access-cnm5t\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.250688 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-catalog-content\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.250972 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-utilities\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.271127 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cnm5t\" (UniqueName: \"kubernetes.io/projected/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-kube-api-access-cnm5t\") pod \"community-operators-xcjxg\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.311019 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:25 crc kubenswrapper[4791]: I1007 00:23:25.799515 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xcjxg"] Oct 07 00:23:26 crc kubenswrapper[4791]: I1007 00:23:26.097218 4791 generic.go:334] "Generic (PLEG): container finished" podID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerID="97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c" exitCode=0 Oct 07 00:23:26 crc kubenswrapper[4791]: I1007 00:23:26.097279 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcjxg" event={"ID":"0e19f8a0-b475-48b2-8f18-c8b83c354dd6","Type":"ContainerDied","Data":"97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c"} Oct 07 00:23:26 crc kubenswrapper[4791]: I1007 00:23:26.097316 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcjxg" event={"ID":"0e19f8a0-b475-48b2-8f18-c8b83c354dd6","Type":"ContainerStarted","Data":"cb6f5b78d520a024a2d4e645580472849fe6166b6641b98bb025ded3df90b3d0"} Oct 07 00:23:27 crc kubenswrapper[4791]: I1007 00:23:27.106999 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcjxg" event={"ID":"0e19f8a0-b475-48b2-8f18-c8b83c354dd6","Type":"ContainerStarted","Data":"59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6"} Oct 07 00:23:28 crc kubenswrapper[4791]: I1007 00:23:28.113369 4791 generic.go:334] "Generic (PLEG): container finished" podID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerID="59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6" exitCode=0 Oct 07 00:23:28 crc kubenswrapper[4791]: I1007 00:23:28.113453 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcjxg" event={"ID":"0e19f8a0-b475-48b2-8f18-c8b83c354dd6","Type":"ContainerDied","Data":"59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6"} Oct 07 00:23:29 crc kubenswrapper[4791]: I1007 00:23:29.120293 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcjxg" event={"ID":"0e19f8a0-b475-48b2-8f18-c8b83c354dd6","Type":"ContainerStarted","Data":"78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62"} Oct 07 00:23:29 crc kubenswrapper[4791]: I1007 00:23:29.143681 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xcjxg" podStartSLOduration=2.677207843 podStartE2EDuration="5.143658162s" podCreationTimestamp="2025-10-07 00:23:24 +0000 UTC" firstStartedPulling="2025-10-07 00:23:26.099254553 +0000 UTC m=+732.695192204" lastFinishedPulling="2025-10-07 00:23:28.565704872 +0000 UTC m=+735.161642523" observedRunningTime="2025-10-07 00:23:29.138453967 +0000 UTC m=+735.734391618" watchObservedRunningTime="2025-10-07 00:23:29.143658162 +0000 UTC m=+735.739595803" Oct 07 00:23:35 crc kubenswrapper[4791]: I1007 00:23:35.312045 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:35 crc kubenswrapper[4791]: I1007 00:23:35.312767 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:35 crc kubenswrapper[4791]: I1007 00:23:35.349678 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:36 crc kubenswrapper[4791]: I1007 00:23:36.205042 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:36 crc kubenswrapper[4791]: I1007 00:23:36.256278 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xcjxg"] Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.176005 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xcjxg" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerName="registry-server" containerID="cri-o://78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62" gracePeriod=2 Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.611610 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.737797 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnm5t\" (UniqueName: \"kubernetes.io/projected/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-kube-api-access-cnm5t\") pod \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.737883 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-catalog-content\") pod \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.737933 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-utilities\") pod \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\" (UID: \"0e19f8a0-b475-48b2-8f18-c8b83c354dd6\") " Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.738680 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-utilities" (OuterVolumeSpecName: "utilities") pod "0e19f8a0-b475-48b2-8f18-c8b83c354dd6" (UID: "0e19f8a0-b475-48b2-8f18-c8b83c354dd6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.750354 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-kube-api-access-cnm5t" (OuterVolumeSpecName: "kube-api-access-cnm5t") pod "0e19f8a0-b475-48b2-8f18-c8b83c354dd6" (UID: "0e19f8a0-b475-48b2-8f18-c8b83c354dd6"). InnerVolumeSpecName "kube-api-access-cnm5t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.796723 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e19f8a0-b475-48b2-8f18-c8b83c354dd6" (UID: "0e19f8a0-b475-48b2-8f18-c8b83c354dd6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.839704 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.839751 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnm5t\" (UniqueName: \"kubernetes.io/projected/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-kube-api-access-cnm5t\") on node \"crc\" DevicePath \"\"" Oct 07 00:23:38 crc kubenswrapper[4791]: I1007 00:23:38.839764 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e19f8a0-b475-48b2-8f18-c8b83c354dd6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.183622 4791 generic.go:334] "Generic (PLEG): container finished" podID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerID="78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62" exitCode=0 Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.183872 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcjxg" event={"ID":"0e19f8a0-b475-48b2-8f18-c8b83c354dd6","Type":"ContainerDied","Data":"78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62"} Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.183912 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xcjxg" event={"ID":"0e19f8a0-b475-48b2-8f18-c8b83c354dd6","Type":"ContainerDied","Data":"cb6f5b78d520a024a2d4e645580472849fe6166b6641b98bb025ded3df90b3d0"} Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.183940 4791 scope.go:117] "RemoveContainer" containerID="78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.183973 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xcjxg" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.203148 4791 scope.go:117] "RemoveContainer" containerID="59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.220699 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xcjxg"] Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.227514 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xcjxg"] Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.230794 4791 scope.go:117] "RemoveContainer" containerID="97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.263077 4791 scope.go:117] "RemoveContainer" containerID="78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62" Oct 07 00:23:39 crc kubenswrapper[4791]: E1007 00:23:39.263893 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62\": container with ID starting with 78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62 not found: ID does not exist" containerID="78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.263955 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62"} err="failed to get container status \"78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62\": rpc error: code = NotFound desc = could not find container \"78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62\": container with ID starting with 78b7b2a7ae12cbe37f9d3992cf5ce20c5f28f5cdb64f3eb9ea4d2d7ba4f03d62 not found: ID does not exist" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.263993 4791 scope.go:117] "RemoveContainer" containerID="59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6" Oct 07 00:23:39 crc kubenswrapper[4791]: E1007 00:23:39.264763 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6\": container with ID starting with 59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6 not found: ID does not exist" containerID="59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.264837 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6"} err="failed to get container status \"59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6\": rpc error: code = NotFound desc = could not find container \"59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6\": container with ID starting with 59f75b849df6ef0305a52685c1e6b7a55b8c8bc39feba310cfb882f49e2a29d6 not found: ID does not exist" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.264877 4791 scope.go:117] "RemoveContainer" containerID="97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c" Oct 07 00:23:39 crc kubenswrapper[4791]: E1007 00:23:39.265437 4791 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c\": container with ID starting with 97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c not found: ID does not exist" containerID="97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c" Oct 07 00:23:39 crc kubenswrapper[4791]: I1007 00:23:39.265485 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c"} err="failed to get container status \"97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c\": rpc error: code = NotFound desc = could not find container \"97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c\": container with ID starting with 97cb806a164691364748a0f9389b1730b8b903f581a609d714d43797788d349c not found: ID does not exist" Oct 07 00:23:40 crc kubenswrapper[4791]: I1007 00:23:40.078088 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" path="/var/lib/kubelet/pods/0e19f8a0-b475-48b2-8f18-c8b83c354dd6/volumes" Oct 07 00:23:40 crc kubenswrapper[4791]: I1007 00:23:40.997766 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hwwzh"] Oct 07 00:23:40 crc kubenswrapper[4791]: E1007 00:23:40.998127 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerName="registry-server" Oct 07 00:23:40 crc kubenswrapper[4791]: I1007 00:23:40.998146 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerName="registry-server" Oct 07 00:23:40 crc kubenswrapper[4791]: E1007 00:23:40.998160 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerName="extract-utilities" Oct 07 00:23:40 crc kubenswrapper[4791]: I1007 00:23:40.998168 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerName="extract-utilities" Oct 07 00:23:40 crc kubenswrapper[4791]: E1007 00:23:40.998180 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerName="extract-content" Oct 07 00:23:40 crc kubenswrapper[4791]: I1007 00:23:40.998188 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerName="extract-content" Oct 07 00:23:40 crc kubenswrapper[4791]: I1007 00:23:40.998321 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e19f8a0-b475-48b2-8f18-c8b83c354dd6" containerName="registry-server" Oct 07 00:23:40 crc kubenswrapper[4791]: I1007 00:23:40.999581 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.020728 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hwwzh"] Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.075412 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-utilities\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.075550 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-catalog-content\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.075584 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hskx\" (UniqueName: \"kubernetes.io/projected/1a88017a-eff9-43e5-b2d0-bad207fb8541-kube-api-access-4hskx\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.177237 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-catalog-content\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.177286 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hskx\" (UniqueName: \"kubernetes.io/projected/1a88017a-eff9-43e5-b2d0-bad207fb8541-kube-api-access-4hskx\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.177318 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-utilities\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.177702 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-catalog-content\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.177717 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-utilities\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.197877 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4hskx\" (UniqueName: \"kubernetes.io/projected/1a88017a-eff9-43e5-b2d0-bad207fb8541-kube-api-access-4hskx\") pod \"certified-operators-hwwzh\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.361198 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.600555 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.601116 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.601190 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.602115 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"37631d119a9543621b7ec3462a6ba16de3d5a41d64a99b73d2268d35fabb5173"} pod="openshift-machine-config-operator/machine-config-daemon-h728c" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.602180 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" containerID="cri-o://37631d119a9543621b7ec3462a6ba16de3d5a41d64a99b73d2268d35fabb5173" gracePeriod=600 Oct 07 00:23:41 crc kubenswrapper[4791]: I1007 00:23:41.621999 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hwwzh"] Oct 07 00:23:42 crc kubenswrapper[4791]: I1007 00:23:42.218491 4791 generic.go:334] "Generic (PLEG): container finished" podID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerID="400ed4a468f4bc29852187d8fe3e935ed330e2fe76fa140a70e80e4d0c3f1813" exitCode=0 Oct 07 00:23:42 crc kubenswrapper[4791]: I1007 00:23:42.218871 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwwzh" event={"ID":"1a88017a-eff9-43e5-b2d0-bad207fb8541","Type":"ContainerDied","Data":"400ed4a468f4bc29852187d8fe3e935ed330e2fe76fa140a70e80e4d0c3f1813"} Oct 07 00:23:42 crc kubenswrapper[4791]: I1007 00:23:42.218911 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwwzh" event={"ID":"1a88017a-eff9-43e5-b2d0-bad207fb8541","Type":"ContainerStarted","Data":"22f55044b0998adcd1a477f518abd3c734f61a2a56c48baea2d649095bfa2c3d"} Oct 07 00:23:42 crc kubenswrapper[4791]: I1007 00:23:42.224116 4791 generic.go:334] "Generic (PLEG): container finished" podID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" 
containerID="37631d119a9543621b7ec3462a6ba16de3d5a41d64a99b73d2268d35fabb5173" exitCode=0 Oct 07 00:23:42 crc kubenswrapper[4791]: I1007 00:23:42.224167 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerDied","Data":"37631d119a9543621b7ec3462a6ba16de3d5a41d64a99b73d2268d35fabb5173"} Oct 07 00:23:42 crc kubenswrapper[4791]: I1007 00:23:42.224203 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"7cfedf539256dfacf0cf2c9e6ad1df35a6f94ed8695670a64b90aaad70e4d317"} Oct 07 00:23:42 crc kubenswrapper[4791]: I1007 00:23:42.224222 4791 scope.go:117] "RemoveContainer" containerID="31ce44ec022d50902d76172a803393157894bbd037a976f1807bc4f95c7c05c8" Oct 07 00:23:43 crc kubenswrapper[4791]: I1007 00:23:43.234071 4791 generic.go:334] "Generic (PLEG): container finished" podID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerID="a8f81fde97efc4caf468802b7dcc742c62a9e66e81277d32e99bb53214fa30fe" exitCode=0 Oct 07 00:23:43 crc kubenswrapper[4791]: I1007 00:23:43.234218 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwwzh" event={"ID":"1a88017a-eff9-43e5-b2d0-bad207fb8541","Type":"ContainerDied","Data":"a8f81fde97efc4caf468802b7dcc742c62a9e66e81277d32e99bb53214fa30fe"} Oct 07 00:23:44 crc kubenswrapper[4791]: I1007 00:23:44.244871 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwwzh" event={"ID":"1a88017a-eff9-43e5-b2d0-bad207fb8541","Type":"ContainerStarted","Data":"435e92d46e1b347919368e905d846bc5abef47c4608aecc1a95020954fc2bd9e"} Oct 07 00:23:44 crc kubenswrapper[4791]: I1007 00:23:44.266048 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hwwzh" podStartSLOduration=2.610960019 podStartE2EDuration="4.266016722s" podCreationTimestamp="2025-10-07 00:23:40 +0000 UTC" firstStartedPulling="2025-10-07 00:23:42.220781643 +0000 UTC m=+748.816719314" lastFinishedPulling="2025-10-07 00:23:43.875838366 +0000 UTC m=+750.471776017" observedRunningTime="2025-10-07 00:23:44.261095805 +0000 UTC m=+750.857033466" watchObservedRunningTime="2025-10-07 00:23:44.266016722 +0000 UTC m=+750.861954373" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.192972 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pwg6z"] Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.195222 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.223523 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pwg6z"] Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.271082 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-catalog-content\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.271136 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-utilities\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.271385 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh75s\" (UniqueName: \"kubernetes.io/projected/fdab493b-0bb2-4899-92c1-2a3db5d10265-kube-api-access-fh75s\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.373033 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-catalog-content\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.373119 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-utilities\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.373207 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh75s\" (UniqueName: \"kubernetes.io/projected/fdab493b-0bb2-4899-92c1-2a3db5d10265-kube-api-access-fh75s\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.373782 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-catalog-content\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.373801 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-utilities\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.398487 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fh75s\" (UniqueName: \"kubernetes.io/projected/fdab493b-0bb2-4899-92c1-2a3db5d10265-kube-api-access-fh75s\") pod \"redhat-operators-pwg6z\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.525018 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:47 crc kubenswrapper[4791]: I1007 00:23:47.971151 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pwg6z"] Oct 07 00:23:48 crc kubenswrapper[4791]: I1007 00:23:48.284829 4791 generic.go:334] "Generic (PLEG): container finished" podID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerID="4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528" exitCode=0 Oct 07 00:23:48 crc kubenswrapper[4791]: I1007 00:23:48.284897 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwg6z" event={"ID":"fdab493b-0bb2-4899-92c1-2a3db5d10265","Type":"ContainerDied","Data":"4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528"} Oct 07 00:23:48 crc kubenswrapper[4791]: I1007 00:23:48.284943 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwg6z" event={"ID":"fdab493b-0bb2-4899-92c1-2a3db5d10265","Type":"ContainerStarted","Data":"2c12f2dbcca2cccd0a6dd2a78fc683a7cc825afcae3d2d229a43711aaec15835"} Oct 07 00:23:50 crc kubenswrapper[4791]: I1007 00:23:50.300147 4791 generic.go:334] "Generic (PLEG): container finished" podID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerID="9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d" exitCode=0 Oct 07 00:23:50 crc kubenswrapper[4791]: I1007 00:23:50.300246 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwg6z" event={"ID":"fdab493b-0bb2-4899-92c1-2a3db5d10265","Type":"ContainerDied","Data":"9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d"} Oct 07 00:23:51 crc kubenswrapper[4791]: I1007 00:23:51.327962 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwg6z" event={"ID":"fdab493b-0bb2-4899-92c1-2a3db5d10265","Type":"ContainerStarted","Data":"35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1"} Oct 07 00:23:51 crc kubenswrapper[4791]: I1007 00:23:51.355126 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pwg6z" podStartSLOduration=1.924182685 podStartE2EDuration="4.355093615s" podCreationTimestamp="2025-10-07 00:23:47 +0000 UTC" firstStartedPulling="2025-10-07 00:23:48.286199236 +0000 UTC m=+754.882136887" lastFinishedPulling="2025-10-07 00:23:50.717110166 +0000 UTC m=+757.313047817" observedRunningTime="2025-10-07 00:23:51.348169669 +0000 UTC m=+757.944107410" watchObservedRunningTime="2025-10-07 00:23:51.355093615 +0000 UTC m=+757.951031296" Oct 07 00:23:51 crc kubenswrapper[4791]: I1007 00:23:51.363762 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:51 crc kubenswrapper[4791]: I1007 00:23:51.365734 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:51 crc kubenswrapper[4791]: I1007 00:23:51.409109 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:52 crc kubenswrapper[4791]: I1007 00:23:52.395223 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:53 crc kubenswrapper[4791]: I1007 00:23:53.389348 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hwwzh"] Oct 07 00:23:55 crc kubenswrapper[4791]: I1007 00:23:55.364976 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hwwzh" podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerName="registry-server" containerID="cri-o://435e92d46e1b347919368e905d846bc5abef47c4608aecc1a95020954fc2bd9e" gracePeriod=2 Oct 07 00:23:56 crc kubenswrapper[4791]: I1007 00:23:56.376098 4791 generic.go:334] "Generic (PLEG): container finished" podID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerID="435e92d46e1b347919368e905d846bc5abef47c4608aecc1a95020954fc2bd9e" exitCode=0 Oct 07 00:23:56 crc kubenswrapper[4791]: I1007 00:23:56.376175 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwwzh" event={"ID":"1a88017a-eff9-43e5-b2d0-bad207fb8541","Type":"ContainerDied","Data":"435e92d46e1b347919368e905d846bc5abef47c4608aecc1a95020954fc2bd9e"} Oct 07 00:23:56 crc kubenswrapper[4791]: I1007 00:23:56.964786 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.031727 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-utilities\") pod \"1a88017a-eff9-43e5-b2d0-bad207fb8541\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.031843 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hskx\" (UniqueName: \"kubernetes.io/projected/1a88017a-eff9-43e5-b2d0-bad207fb8541-kube-api-access-4hskx\") pod \"1a88017a-eff9-43e5-b2d0-bad207fb8541\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.031883 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-catalog-content\") pod \"1a88017a-eff9-43e5-b2d0-bad207fb8541\" (UID: \"1a88017a-eff9-43e5-b2d0-bad207fb8541\") " Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.033118 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-utilities" (OuterVolumeSpecName: "utilities") pod "1a88017a-eff9-43e5-b2d0-bad207fb8541" (UID: "1a88017a-eff9-43e5-b2d0-bad207fb8541"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.041657 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a88017a-eff9-43e5-b2d0-bad207fb8541-kube-api-access-4hskx" (OuterVolumeSpecName: "kube-api-access-4hskx") pod "1a88017a-eff9-43e5-b2d0-bad207fb8541" (UID: "1a88017a-eff9-43e5-b2d0-bad207fb8541"). InnerVolumeSpecName "kube-api-access-4hskx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.087854 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a88017a-eff9-43e5-b2d0-bad207fb8541" (UID: "1a88017a-eff9-43e5-b2d0-bad207fb8541"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.134561 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.134626 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hskx\" (UniqueName: \"kubernetes.io/projected/1a88017a-eff9-43e5-b2d0-bad207fb8541-kube-api-access-4hskx\") on node \"crc\" DevicePath \"\"" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.134651 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a88017a-eff9-43e5-b2d0-bad207fb8541-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.385163 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwwzh" event={"ID":"1a88017a-eff9-43e5-b2d0-bad207fb8541","Type":"ContainerDied","Data":"22f55044b0998adcd1a477f518abd3c734f61a2a56c48baea2d649095bfa2c3d"} Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.385818 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hwwzh" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.385855 4791 scope.go:117] "RemoveContainer" containerID="435e92d46e1b347919368e905d846bc5abef47c4608aecc1a95020954fc2bd9e" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.410233 4791 scope.go:117] "RemoveContainer" containerID="a8f81fde97efc4caf468802b7dcc742c62a9e66e81277d32e99bb53214fa30fe" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.424486 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hwwzh"] Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.429010 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hwwzh"] Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.448214 4791 scope.go:117] "RemoveContainer" containerID="400ed4a468f4bc29852187d8fe3e935ed330e2fe76fa140a70e80e4d0c3f1813" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.525244 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.526869 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:57 crc kubenswrapper[4791]: I1007 00:23:57.569045 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:58 crc kubenswrapper[4791]: I1007 00:23:58.081102 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" path="/var/lib/kubelet/pods/1a88017a-eff9-43e5-b2d0-bad207fb8541/volumes" Oct 07 00:23:58 crc 
kubenswrapper[4791]: I1007 00:23:58.439149 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:23:59 crc kubenswrapper[4791]: I1007 00:23:59.984135 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pwg6z"] Oct 07 00:24:01 crc kubenswrapper[4791]: I1007 00:24:01.418816 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pwg6z" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerName="registry-server" containerID="cri-o://35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1" gracePeriod=2 Oct 07 00:24:01 crc kubenswrapper[4791]: I1007 00:24:01.841172 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:24:01 crc kubenswrapper[4791]: I1007 00:24:01.914443 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh75s\" (UniqueName: \"kubernetes.io/projected/fdab493b-0bb2-4899-92c1-2a3db5d10265-kube-api-access-fh75s\") pod \"fdab493b-0bb2-4899-92c1-2a3db5d10265\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " Oct 07 00:24:01 crc kubenswrapper[4791]: I1007 00:24:01.914548 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-utilities\") pod \"fdab493b-0bb2-4899-92c1-2a3db5d10265\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " Oct 07 00:24:01 crc kubenswrapper[4791]: I1007 00:24:01.914603 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-catalog-content\") pod \"fdab493b-0bb2-4899-92c1-2a3db5d10265\" (UID: \"fdab493b-0bb2-4899-92c1-2a3db5d10265\") " Oct 07 00:24:01 crc kubenswrapper[4791]: I1007 00:24:01.915767 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-utilities" (OuterVolumeSpecName: "utilities") pod "fdab493b-0bb2-4899-92c1-2a3db5d10265" (UID: "fdab493b-0bb2-4899-92c1-2a3db5d10265"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:01 crc kubenswrapper[4791]: I1007 00:24:01.936697 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdab493b-0bb2-4899-92c1-2a3db5d10265-kube-api-access-fh75s" (OuterVolumeSpecName: "kube-api-access-fh75s") pod "fdab493b-0bb2-4899-92c1-2a3db5d10265" (UID: "fdab493b-0bb2-4899-92c1-2a3db5d10265"). InnerVolumeSpecName "kube-api-access-fh75s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.012345 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fdab493b-0bb2-4899-92c1-2a3db5d10265" (UID: "fdab493b-0bb2-4899-92c1-2a3db5d10265"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.016633 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.016686 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdab493b-0bb2-4899-92c1-2a3db5d10265-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.016703 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh75s\" (UniqueName: \"kubernetes.io/projected/fdab493b-0bb2-4899-92c1-2a3db5d10265-kube-api-access-fh75s\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.430532 4791 generic.go:334] "Generic (PLEG): container finished" podID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerID="35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1" exitCode=0 Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.430591 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwg6z" event={"ID":"fdab493b-0bb2-4899-92c1-2a3db5d10265","Type":"ContainerDied","Data":"35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1"} Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.430677 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pwg6z" event={"ID":"fdab493b-0bb2-4899-92c1-2a3db5d10265","Type":"ContainerDied","Data":"2c12f2dbcca2cccd0a6dd2a78fc683a7cc825afcae3d2d229a43711aaec15835"} Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.430682 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pwg6z" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.430705 4791 scope.go:117] "RemoveContainer" containerID="35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.455854 4791 scope.go:117] "RemoveContainer" containerID="9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.458152 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pwg6z"] Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.466454 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pwg6z"] Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.486151 4791 scope.go:117] "RemoveContainer" containerID="4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.503224 4791 scope.go:117] "RemoveContainer" containerID="35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1" Oct 07 00:24:02 crc kubenswrapper[4791]: E1007 00:24:02.504046 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1\": container with ID starting with 35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1 not found: ID does not exist" containerID="35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.504108 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1"} err="failed to get container status \"35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1\": rpc error: code = NotFound desc = could not find container \"35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1\": container with ID starting with 35ee7e623cb40847077f918005385df88ab79155484d2b55640a7d66d5a999a1 not found: ID does not exist" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.504148 4791 scope.go:117] "RemoveContainer" containerID="9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d" Oct 07 00:24:02 crc kubenswrapper[4791]: E1007 00:24:02.504813 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d\": container with ID starting with 9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d not found: ID does not exist" containerID="9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.504868 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d"} err="failed to get container status \"9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d\": rpc error: code = NotFound desc = could not find container \"9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d\": container with ID starting with 9e4be59141fc06bf75661dd8c6331befb817ac422c343cef6b165b345f6a560d not found: ID does not exist" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.504907 4791 scope.go:117] "RemoveContainer" 
containerID="4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528" Oct 07 00:24:02 crc kubenswrapper[4791]: E1007 00:24:02.505503 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528\": container with ID starting with 4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528 not found: ID does not exist" containerID="4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528" Oct 07 00:24:02 crc kubenswrapper[4791]: I1007 00:24:02.505555 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528"} err="failed to get container status \"4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528\": rpc error: code = NotFound desc = could not find container \"4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528\": container with ID starting with 4cea1b82cf1f7c164ad9744628b43681186ddc96de5d227f69d43111b006f528 not found: ID does not exist" Oct 07 00:24:04 crc kubenswrapper[4791]: I1007 00:24:04.080314 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" path="/var/lib/kubelet/pods/fdab493b-0bb2-4899-92c1-2a3db5d10265/volumes" Oct 07 00:24:09 crc kubenswrapper[4791]: I1007 00:24:09.482567 4791 generic.go:334] "Generic (PLEG): container finished" podID="13647145-7356-4c86-9cca-2f03087ca908" containerID="1294028d319964796a817a86980e950364091bf6f87b778d049afc8d2108457a" exitCode=0 Oct 07 00:24:09 crc kubenswrapper[4791]: I1007 00:24:09.482648 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"13647145-7356-4c86-9cca-2f03087ca908","Type":"ContainerDied","Data":"1294028d319964796a817a86980e950364091bf6f87b778d049afc8d2108457a"} Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.714570 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734079 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q4zr\" (UniqueName: \"kubernetes.io/projected/13647145-7356-4c86-9cca-2f03087ca908-kube-api-access-7q4zr\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734143 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-run\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734167 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-node-pullsecrets\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734192 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-proxy-ca-bundles\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734218 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-push\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734237 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-buildcachedir\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734300 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734433 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734889 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-root\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734915 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734940 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-pull\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.734974 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-system-configs\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.735001 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-build-blob-cache\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.735027 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-buildworkdir\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.735077 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-ca-bundles\") pod \"13647145-7356-4c86-9cca-2f03087ca908\" (UID: \"13647145-7356-4c86-9cca-2f03087ca908\") " Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.735368 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.735385 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.735396 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/13647145-7356-4c86-9cca-2f03087ca908-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc 
kubenswrapper[4791]: I1007 00:24:10.735475 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.735979 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.742024 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13647145-7356-4c86-9cca-2f03087ca908-kube-api-access-7q4zr" (OuterVolumeSpecName: "kube-api-access-7q4zr") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "kube-api-access-7q4zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.746495 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.749631 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.751382 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.780828 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.837521 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.837798 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.837895 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.837954 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/13647145-7356-4c86-9cca-2f03087ca908-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.838016 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q4zr\" (UniqueName: \"kubernetes.io/projected/13647145-7356-4c86-9cca-2f03087ca908-kube-api-access-7q4zr\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.838115 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.838185 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/13647145-7356-4c86-9cca-2f03087ca908-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:10 crc kubenswrapper[4791]: I1007 00:24:10.942920 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:11 crc kubenswrapper[4791]: I1007 00:24:11.042207 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:11 crc kubenswrapper[4791]: I1007 00:24:11.501120 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"13647145-7356-4c86-9cca-2f03087ca908","Type":"ContainerDied","Data":"3f6542f963900f9bec79e2028f8d40d211a1b4a715e55e2e853be1654f5b8f9b"} Oct 07 00:24:11 crc kubenswrapper[4791]: I1007 00:24:11.501177 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f6542f963900f9bec79e2028f8d40d211a1b4a715e55e2e853be1654f5b8f9b" Oct 07 00:24:11 crc kubenswrapper[4791]: I1007 00:24:11.501257 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Oct 07 00:24:13 crc kubenswrapper[4791]: I1007 00:24:13.069521 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "13647145-7356-4c86-9cca-2f03087ca908" (UID: "13647145-7356-4c86-9cca-2f03087ca908"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:13 crc kubenswrapper[4791]: I1007 00:24:13.076309 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/13647145-7356-4c86-9cca-2f03087ca908-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.687621 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688833 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13647145-7356-4c86-9cca-2f03087ca908" containerName="manage-dockerfile" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.688851 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="13647145-7356-4c86-9cca-2f03087ca908" containerName="manage-dockerfile" Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688864 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerName="registry-server" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.688872 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerName="registry-server" Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688882 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerName="extract-content" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.688890 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerName="extract-content" Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688906 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerName="extract-utilities" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.688913 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerName="extract-utilities" Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688923 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerName="extract-utilities" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.688930 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerName="extract-utilities" Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688946 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13647145-7356-4c86-9cca-2f03087ca908" containerName="git-clone" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.688953 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="13647145-7356-4c86-9cca-2f03087ca908" containerName="git-clone" Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688962 4791 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerName="extract-content" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.688968 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerName="extract-content" Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688978 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13647145-7356-4c86-9cca-2f03087ca908" containerName="docker-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.688985 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="13647145-7356-4c86-9cca-2f03087ca908" containerName="docker-build" Oct 07 00:24:15 crc kubenswrapper[4791]: E1007 00:24:15.688996 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerName="registry-server" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.689002 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerName="registry-server" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.689161 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="13647145-7356-4c86-9cca-2f03087ca908" containerName="docker-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.689175 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdab493b-0bb2-4899-92c1-2a3db5d10265" containerName="registry-server" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.689190 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a88017a-eff9-43e5-b2d0-bad207fb8541" containerName="registry-server" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.689973 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.692134 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.692780 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-global-ca" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.693964 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-sys-config" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.694304 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-ca" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715139 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715156 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715250 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: 
\"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715284 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715312 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715348 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715366 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715386 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715496 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgjjv\" (UniqueName: \"kubernetes.io/projected/4130eeb6-6324-4546-8684-71909617fc84-kube-api-access-fgjjv\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715538 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715568 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 
00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715606 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.715655 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817339 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817448 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817494 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817528 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817554 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817595 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817626 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: 
\"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817654 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817660 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817702 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgjjv\" (UniqueName: \"kubernetes.io/projected/4130eeb6-6324-4546-8684-71909617fc84-kube-api-access-fgjjv\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817774 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817828 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.817949 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.818453 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.818488 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.818802 4791 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.819186 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.819267 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.820291 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.820344 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.820831 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.828286 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.832195 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:15 crc kubenswrapper[4791]: I1007 00:24:15.839933 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgjjv\" (UniqueName: \"kubernetes.io/projected/4130eeb6-6324-4546-8684-71909617fc84-kube-api-access-fgjjv\") pod \"smart-gateway-operator-1-build\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " 
pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:16 crc kubenswrapper[4791]: I1007 00:24:16.014857 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:16 crc kubenswrapper[4791]: I1007 00:24:16.309972 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 07 00:24:16 crc kubenswrapper[4791]: I1007 00:24:16.544078 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4130eeb6-6324-4546-8684-71909617fc84","Type":"ContainerStarted","Data":"41d0e62e3b57002b1138c3859261d56697aee2e762357d9e98e319301a19b861"} Oct 07 00:24:17 crc kubenswrapper[4791]: I1007 00:24:17.555724 4791 generic.go:334] "Generic (PLEG): container finished" podID="4130eeb6-6324-4546-8684-71909617fc84" containerID="6a641e60a9006d33f9959ec4d2aea03cfdca5c89d1f178275560bcd57f2dfda2" exitCode=0 Oct 07 00:24:17 crc kubenswrapper[4791]: I1007 00:24:17.555842 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4130eeb6-6324-4546-8684-71909617fc84","Type":"ContainerDied","Data":"6a641e60a9006d33f9959ec4d2aea03cfdca5c89d1f178275560bcd57f2dfda2"} Oct 07 00:24:18 crc kubenswrapper[4791]: I1007 00:24:18.564662 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4130eeb6-6324-4546-8684-71909617fc84","Type":"ContainerStarted","Data":"e2660238770555971ca8edfc2ba9cf2e0cd51629cb1d131055782990d47bf5f4"} Oct 07 00:24:18 crc kubenswrapper[4791]: I1007 00:24:18.603246 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-1-build" podStartSLOduration=3.603220623 podStartE2EDuration="3.603220623s" podCreationTimestamp="2025-10-07 00:24:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:24:18.595394166 +0000 UTC m=+785.191331827" watchObservedRunningTime="2025-10-07 00:24:18.603220623 +0000 UTC m=+785.199158274" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.381298 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.382517 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/smart-gateway-operator-1-build" podUID="4130eeb6-6324-4546-8684-71909617fc84" containerName="docker-build" containerID="cri-o://e2660238770555971ca8edfc2ba9cf2e0cd51629cb1d131055782990d47bf5f4" gracePeriod=30 Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.615527 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_4130eeb6-6324-4546-8684-71909617fc84/docker-build/0.log" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.615909 4791 generic.go:334] "Generic (PLEG): container finished" podID="4130eeb6-6324-4546-8684-71909617fc84" containerID="e2660238770555971ca8edfc2ba9cf2e0cd51629cb1d131055782990d47bf5f4" exitCode=1 Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.615951 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" 
event={"ID":"4130eeb6-6324-4546-8684-71909617fc84","Type":"ContainerDied","Data":"e2660238770555971ca8edfc2ba9cf2e0cd51629cb1d131055782990d47bf5f4"} Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.747192 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_4130eeb6-6324-4546-8684-71909617fc84/docker-build/0.log" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.747649 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.814826 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-ca-bundles\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.814882 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-buildcachedir\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.814912 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-node-pullsecrets\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.814939 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-push\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.815006 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-system-configs\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.815039 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-pull\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.815062 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-root\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.815103 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-build-blob-cache\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 
00:24:26.815141 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-buildworkdir\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.815208 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-proxy-ca-bundles\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.815245 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgjjv\" (UniqueName: \"kubernetes.io/projected/4130eeb6-6324-4546-8684-71909617fc84-kube-api-access-fgjjv\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.815278 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-run\") pod \"4130eeb6-6324-4546-8684-71909617fc84\" (UID: \"4130eeb6-6324-4546-8684-71909617fc84\") " Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.814981 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.815024 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.816315 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.816019 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.816491 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.817075 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.818156 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.820671 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.820727 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4130eeb6-6324-4546-8684-71909617fc84-kube-api-access-fgjjv" (OuterVolumeSpecName: "kube-api-access-fgjjv") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "kube-api-access-fgjjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.821438 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917327 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgjjv\" (UniqueName: \"kubernetes.io/projected/4130eeb6-6324-4546-8684-71909617fc84-kube-api-access-fgjjv\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917366 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917378 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917387 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917396 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4130eeb6-6324-4546-8684-71909617fc84-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917419 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917429 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917439 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/4130eeb6-6324-4546-8684-71909617fc84-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917449 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:26 crc kubenswrapper[4791]: I1007 00:24:26.917459 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4130eeb6-6324-4546-8684-71909617fc84-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.001902 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.018868 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.302960 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "4130eeb6-6324-4546-8684-71909617fc84" (UID: "4130eeb6-6324-4546-8684-71909617fc84"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.323382 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4130eeb6-6324-4546-8684-71909617fc84-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.624090 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_4130eeb6-6324-4546-8684-71909617fc84/docker-build/0.log" Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.625124 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4130eeb6-6324-4546-8684-71909617fc84","Type":"ContainerDied","Data":"41d0e62e3b57002b1138c3859261d56697aee2e762357d9e98e319301a19b861"} Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.625191 4791 scope.go:117] "RemoveContainer" containerID="e2660238770555971ca8edfc2ba9cf2e0cd51629cb1d131055782990d47bf5f4" Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.625331 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.667896 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.673975 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 07 00:24:27 crc kubenswrapper[4791]: I1007 00:24:27.707528 4791 scope.go:117] "RemoveContainer" containerID="6a641e60a9006d33f9959ec4d2aea03cfdca5c89d1f178275560bcd57f2dfda2" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.081688 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4130eeb6-6324-4546-8684-71909617fc84" path="/var/lib/kubelet/pods/4130eeb6-6324-4546-8684-71909617fc84/volumes" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.145169 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Oct 07 00:24:28 crc kubenswrapper[4791]: E1007 00:24:28.146279 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4130eeb6-6324-4546-8684-71909617fc84" containerName="manage-dockerfile" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.146386 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4130eeb6-6324-4546-8684-71909617fc84" containerName="manage-dockerfile" Oct 07 00:24:28 crc kubenswrapper[4791]: E1007 00:24:28.146502 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4130eeb6-6324-4546-8684-71909617fc84" containerName="docker-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.146576 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4130eeb6-6324-4546-8684-71909617fc84" containerName="docker-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.146768 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4130eeb6-6324-4546-8684-71909617fc84" containerName="docker-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.150299 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.152500 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.154965 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-global-ca" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.155046 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-sys-config" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.156325 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-ca" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.178994 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.235513 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.235823 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.235921 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236034 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236118 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236211 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " 
pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236282 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236390 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236625 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236747 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236844 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.236955 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n79pc\" (UniqueName: \"kubernetes.io/projected/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-kube-api-access-n79pc\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.338188 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.338772 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.338990 4791 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.339186 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.339325 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.339645 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.339840 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340005 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340141 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340177 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340362 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " 
pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340569 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340710 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340816 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n79pc\" (UniqueName: \"kubernetes.io/projected/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-kube-api-access-n79pc\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340827 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340414 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.340986 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.341351 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.341777 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.341243 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.342277 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.344127 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.353110 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.358986 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n79pc\" (UniqueName: \"kubernetes.io/projected/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-kube-api-access-n79pc\") pod \"smart-gateway-operator-2-build\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.466615 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:24:28 crc kubenswrapper[4791]: I1007 00:24:28.917089 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Oct 07 00:24:29 crc kubenswrapper[4791]: I1007 00:24:29.640912 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8","Type":"ContainerStarted","Data":"1a563786e21d6961a973095d1749c1111d7f7870be79b03caaa59dbd80592524"} Oct 07 00:24:29 crc kubenswrapper[4791]: I1007 00:24:29.641315 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8","Type":"ContainerStarted","Data":"46a01cbecff0c01a55e01ef4307287096b9239059f77c26d9ad71e055321b562"} Oct 07 00:24:30 crc kubenswrapper[4791]: I1007 00:24:30.649812 4791 generic.go:334] "Generic (PLEG): container finished" podID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerID="1a563786e21d6961a973095d1749c1111d7f7870be79b03caaa59dbd80592524" exitCode=0 Oct 07 00:24:30 crc kubenswrapper[4791]: I1007 00:24:30.649872 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8","Type":"ContainerDied","Data":"1a563786e21d6961a973095d1749c1111d7f7870be79b03caaa59dbd80592524"} Oct 07 00:24:31 crc kubenswrapper[4791]: I1007 00:24:31.657276 4791 generic.go:334] "Generic (PLEG): container finished" podID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerID="2679e41f490737eeb40341c68503b2b8bdce7b69f68ecaf22c510f7e13dc83ba" exitCode=0 Oct 07 00:24:31 crc kubenswrapper[4791]: I1007 00:24:31.657367 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8","Type":"ContainerDied","Data":"2679e41f490737eeb40341c68503b2b8bdce7b69f68ecaf22c510f7e13dc83ba"} Oct 07 00:24:31 crc kubenswrapper[4791]: I1007 00:24:31.702277 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-2-build_c1c69dc4-67d4-4f2c-ad78-aaf9599991f8/manage-dockerfile/0.log" Oct 07 00:24:32 crc kubenswrapper[4791]: I1007 00:24:32.669396 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8","Type":"ContainerStarted","Data":"e7c915748aa21a923e69fade59196c6a1e9e9dcdddd78644e00a1c8f027ecb36"} Oct 07 00:24:32 crc kubenswrapper[4791]: I1007 00:24:32.705668 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-2-build" podStartSLOduration=4.705646892 podStartE2EDuration="4.705646892s" podCreationTimestamp="2025-10-07 00:24:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:24:32.700921455 +0000 UTC m=+799.296859106" watchObservedRunningTime="2025-10-07 00:24:32.705646892 +0000 UTC m=+799.301584543" Oct 07 00:25:41 crc kubenswrapper[4791]: I1007 00:25:41.103997 4791 generic.go:334] "Generic (PLEG): container finished" podID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerID="e7c915748aa21a923e69fade59196c6a1e9e9dcdddd78644e00a1c8f027ecb36" exitCode=0 Oct 07 00:25:41 crc kubenswrapper[4791]: I1007 00:25:41.104247 4791 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8","Type":"ContainerDied","Data":"e7c915748aa21a923e69fade59196c6a1e9e9dcdddd78644e00a1c8f027ecb36"} Oct 07 00:25:41 crc kubenswrapper[4791]: I1007 00:25:41.600662 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:25:41 crc kubenswrapper[4791]: I1007 00:25:41.600756 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.348840 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.526669 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-run\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.526754 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-pull\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.526783 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-root\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.526825 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-ca-bundles\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.526866 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildcachedir\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.527092 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.527548 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-proxy-ca-bundles\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.527626 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-blob-cache\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.527702 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildworkdir\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.527730 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-system-configs\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.527761 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n79pc\" (UniqueName: \"kubernetes.io/projected/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-kube-api-access-n79pc\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.527811 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-push\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.527835 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-node-pullsecrets\") pod \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\" (UID: \"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8\") " Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528048 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528051 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "build-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528196 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528316 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528568 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528621 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528647 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528663 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528674 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.528688 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.532184 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.537547 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.537564 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-kube-api-access-n79pc" (OuterVolumeSpecName: "kube-api-access-n79pc") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "kube-api-access-n79pc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.537619 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.630328 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.630485 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n79pc\" (UniqueName: \"kubernetes.io/projected/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-kube-api-access-n79pc\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.630500 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.630513 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.630528 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.764515 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:25:42 crc kubenswrapper[4791]: I1007 00:25:42.833675 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:43 crc kubenswrapper[4791]: I1007 00:25:43.118745 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"c1c69dc4-67d4-4f2c-ad78-aaf9599991f8","Type":"ContainerDied","Data":"46a01cbecff0c01a55e01ef4307287096b9239059f77c26d9ad71e055321b562"} Oct 07 00:25:43 crc kubenswrapper[4791]: I1007 00:25:43.118802 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46a01cbecff0c01a55e01ef4307287096b9239059f77c26d9ad71e055321b562" Oct 07 00:25:43 crc kubenswrapper[4791]: I1007 00:25:43.118974 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Oct 07 00:25:44 crc kubenswrapper[4791]: I1007 00:25:44.654186 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" (UID: "c1c69dc4-67d4-4f2c-ad78-aaf9599991f8"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:25:44 crc kubenswrapper[4791]: I1007 00:25:44.657679 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c1c69dc4-67d4-4f2c-ad78-aaf9599991f8-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.277540 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 07 00:25:47 crc kubenswrapper[4791]: E1007 00:25:47.279970 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerName="manage-dockerfile" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.280020 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerName="manage-dockerfile" Oct 07 00:25:47 crc kubenswrapper[4791]: E1007 00:25:47.280053 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerName="docker-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.280062 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerName="docker-build" Oct 07 00:25:47 crc kubenswrapper[4791]: E1007 00:25:47.280081 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerName="git-clone" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.280089 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerName="git-clone" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.280281 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1c69dc4-67d4-4f2c-ad78-aaf9599991f8" containerName="docker-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.281358 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.284826 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.284870 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-sys-config" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.284829 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-ca" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.285063 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-global-ca" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.305365 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.399792 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildworkdir\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.399845 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4czh\" (UniqueName: \"kubernetes.io/projected/be56ae26-d609-4dc6-93fb-70a7a2d5d524-kube-api-access-k4czh\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.399905 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.399937 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.400095 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.400147 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-push\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.400238 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: 
\"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-system-configs\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.400371 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-run\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.400502 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.400581 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-root\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.400659 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildcachedir\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.400715 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-pull\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502512 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-run\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502577 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502603 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-root\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502639 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: 
\"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildcachedir\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502665 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-pull\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502699 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildworkdir\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502719 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4czh\" (UniqueName: \"kubernetes.io/projected/be56ae26-d609-4dc6-93fb-70a7a2d5d524-kube-api-access-k4czh\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502767 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502780 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildcachedir\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502798 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.502974 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.503046 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.503090 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-push\") pod \"sg-core-1-build\" (UID: 
\"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.503175 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.503181 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-system-configs\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.503383 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-root\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.503932 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.503961 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildworkdir\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.504119 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-system-configs\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.504195 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.504372 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-run\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.511202 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-pull\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.511621 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-push\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.523211 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4czh\" (UniqueName: \"kubernetes.io/projected/be56ae26-d609-4dc6-93fb-70a7a2d5d524-kube-api-access-k4czh\") pod \"sg-core-1-build\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.614703 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Oct 07 00:25:47 crc kubenswrapper[4791]: I1007 00:25:47.812768 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 07 00:25:48 crc kubenswrapper[4791]: I1007 00:25:48.170416 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"be56ae26-d609-4dc6-93fb-70a7a2d5d524","Type":"ContainerStarted","Data":"74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d"} Oct 07 00:25:48 crc kubenswrapper[4791]: I1007 00:25:48.170488 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"be56ae26-d609-4dc6-93fb-70a7a2d5d524","Type":"ContainerStarted","Data":"8961456293d9f7b43c9532dad1696eea876db2bcc1ca26b63b3f5edc734ca1b8"} Oct 07 00:25:49 crc kubenswrapper[4791]: I1007 00:25:49.186475 4791 generic.go:334] "Generic (PLEG): container finished" podID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" containerID="74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d" exitCode=0 Oct 07 00:25:49 crc kubenswrapper[4791]: I1007 00:25:49.186554 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"be56ae26-d609-4dc6-93fb-70a7a2d5d524","Type":"ContainerDied","Data":"74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d"} Oct 07 00:25:50 crc kubenswrapper[4791]: I1007 00:25:50.198266 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"be56ae26-d609-4dc6-93fb-70a7a2d5d524","Type":"ContainerStarted","Data":"ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a"} Oct 07 00:25:50 crc kubenswrapper[4791]: I1007 00:25:50.239302 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-core-1-build" podStartSLOduration=3.239270487 podStartE2EDuration="3.239270487s" podCreationTimestamp="2025-10-07 00:25:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:25:50.233297757 +0000 UTC m=+876.829235448" watchObservedRunningTime="2025-10-07 00:25:50.239270487 +0000 UTC m=+876.835208158" Oct 07 00:25:57 crc kubenswrapper[4791]: I1007 00:25:57.699909 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 07 00:25:57 crc kubenswrapper[4791]: I1007 00:25:57.701464 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/sg-core-1-build" podUID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" containerName="docker-build" 
containerID="cri-o://ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a" gracePeriod=30 Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.074438 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-1-build_be56ae26-d609-4dc6-93fb-70a7a2d5d524/docker-build/0.log" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.074942 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.261931 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-1-build_be56ae26-d609-4dc6-93fb-70a7a2d5d524/docker-build/0.log" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.263066 4791 generic.go:334] "Generic (PLEG): container finished" podID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" containerID="ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a" exitCode=1 Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.263152 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.263143 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"be56ae26-d609-4dc6-93fb-70a7a2d5d524","Type":"ContainerDied","Data":"ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a"} Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.263303 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"be56ae26-d609-4dc6-93fb-70a7a2d5d524","Type":"ContainerDied","Data":"8961456293d9f7b43c9532dad1696eea876db2bcc1ca26b63b3f5edc734ca1b8"} Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.263331 4791 scope.go:117] "RemoveContainer" containerID="ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272101 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-proxy-ca-bundles\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272151 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-push\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272192 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-run\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272239 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildcachedir\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272292 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-blob-cache\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272329 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-pull\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272361 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-node-pullsecrets\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272433 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-system-configs\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272451 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-root\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272473 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildworkdir\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272496 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-ca-bundles\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272562 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4czh\" (UniqueName: \"kubernetes.io/projected/be56ae26-d609-4dc6-93fb-70a7a2d5d524-kube-api-access-k4czh\") pod \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\" (UID: \"be56ae26-d609-4dc6-93fb-70a7a2d5d524\") " Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.272933 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.273088 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.273137 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.273924 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.274368 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.275064 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.275694 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.279109 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.279629 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.279850 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be56ae26-d609-4dc6-93fb-70a7a2d5d524-kube-api-access-k4czh" (OuterVolumeSpecName: "kube-api-access-k4czh") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "kube-api-access-k4czh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.315376 4791 scope.go:117] "RemoveContainer" containerID="74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.357577 4791 scope.go:117] "RemoveContainer" containerID="ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a" Oct 07 00:25:58 crc kubenswrapper[4791]: E1007 00:25:58.358290 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a\": container with ID starting with ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a not found: ID does not exist" containerID="ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.358373 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a"} err="failed to get container status \"ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a\": rpc error: code = NotFound desc = could not find container \"ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a\": container with ID starting with ef9337382b344bc75adeea7614f06f5025d643aaaecc3eeacb9b06dbd3acba5a not found: ID does not exist" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.358464 4791 scope.go:117] "RemoveContainer" containerID="74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d" Oct 07 00:25:58 crc kubenswrapper[4791]: E1007 00:25:58.359353 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d\": container with ID starting with 74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d not found: ID does not exist" containerID="74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.359450 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d"} err="failed to get container status \"74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d\": rpc error: code = NotFound desc = could not find container \"74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d\": container with ID starting with 
74a00c88ee1d1c5d06ec309f787299aa57337be7f0111fc8200b59dd0043427d not found: ID does not exist" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373698 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373734 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373746 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373757 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373765 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/be56ae26-d609-4dc6-93fb-70a7a2d5d524-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373774 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/be56ae26-d609-4dc6-93fb-70a7a2d5d524-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373783 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373794 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373804 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.373813 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4czh\" (UniqueName: \"kubernetes.io/projected/be56ae26-d609-4dc6-93fb-70a7a2d5d524-kube-api-access-k4czh\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.398498 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.475740 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.573323 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "be56ae26-d609-4dc6-93fb-70a7a2d5d524" (UID: "be56ae26-d609-4dc6-93fb-70a7a2d5d524"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.576818 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/be56ae26-d609-4dc6-93fb-70a7a2d5d524-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.896691 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 07 00:25:58 crc kubenswrapper[4791]: I1007 00:25:58.905948 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.477074 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-core-2-build"] Oct 07 00:25:59 crc kubenswrapper[4791]: E1007 00:25:59.477364 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" containerName="manage-dockerfile" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.477380 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" containerName="manage-dockerfile" Oct 07 00:25:59 crc kubenswrapper[4791]: E1007 00:25:59.477421 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" containerName="docker-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.477429 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" containerName="docker-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.477537 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" containerName="docker-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.496071 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.499538 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.504017 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-sys-config" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.504056 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-ca" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.504281 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-global-ca" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.507472 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-2-build"] Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.696152 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.696235 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-root\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.696284 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.696642 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-928l4\" (UniqueName: \"kubernetes.io/projected/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-kube-api-access-928l4\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.696687 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildcachedir\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.696736 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-pull\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.696783 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildworkdir\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.696868 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-run\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.697068 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-system-configs\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.697127 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.697152 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-push\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.697285 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.798865 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-system-configs\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.798938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.798963 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-push\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.798987 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799020 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799049 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-root\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799079 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799125 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-928l4\" (UniqueName: \"kubernetes.io/projected/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-kube-api-access-928l4\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799150 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildcachedir\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799156 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799180 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-pull\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799298 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildworkdir\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799362 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-run\") pod \"sg-core-2-build\" (UID: 
\"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799674 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-root\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799909 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-run\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.799972 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildcachedir\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.800250 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildworkdir\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.800689 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-system-configs\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.800696 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.800934 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.801138 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.802856 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-pull\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.806192 4791 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-push\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.820526 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-928l4\" (UniqueName: \"kubernetes.io/projected/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-kube-api-access-928l4\") pod \"sg-core-2-build\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " pod="service-telemetry/sg-core-2-build" Oct 07 00:25:59 crc kubenswrapper[4791]: I1007 00:25:59.823489 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Oct 07 00:26:00 crc kubenswrapper[4791]: I1007 00:26:00.055791 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-2-build"] Oct 07 00:26:00 crc kubenswrapper[4791]: I1007 00:26:00.079367 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be56ae26-d609-4dc6-93fb-70a7a2d5d524" path="/var/lib/kubelet/pods/be56ae26-d609-4dc6-93fb-70a7a2d5d524/volumes" Oct 07 00:26:00 crc kubenswrapper[4791]: I1007 00:26:00.279256 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f","Type":"ContainerStarted","Data":"97df901c9040863097d95d02732b978e2c43cefd39e923bbdd2e218914d76896"} Oct 07 00:26:01 crc kubenswrapper[4791]: I1007 00:26:01.288878 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f","Type":"ContainerStarted","Data":"507c7d8a215fb66d3e44bbd4b50d8a6e7fbef94d4f9b11c13b1bbf5b70c53e88"} Oct 07 00:26:02 crc kubenswrapper[4791]: I1007 00:26:02.299261 4791 generic.go:334] "Generic (PLEG): container finished" podID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerID="507c7d8a215fb66d3e44bbd4b50d8a6e7fbef94d4f9b11c13b1bbf5b70c53e88" exitCode=0 Oct 07 00:26:02 crc kubenswrapper[4791]: I1007 00:26:02.299309 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f","Type":"ContainerDied","Data":"507c7d8a215fb66d3e44bbd4b50d8a6e7fbef94d4f9b11c13b1bbf5b70c53e88"} Oct 07 00:26:03 crc kubenswrapper[4791]: I1007 00:26:03.308739 4791 generic.go:334] "Generic (PLEG): container finished" podID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerID="0f6417eb5e31f39efc0b1428809a1a72b3b884de29db515a75474afa29e7d4d6" exitCode=0 Oct 07 00:26:03 crc kubenswrapper[4791]: I1007 00:26:03.308807 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f","Type":"ContainerDied","Data":"0f6417eb5e31f39efc0b1428809a1a72b3b884de29db515a75474afa29e7d4d6"} Oct 07 00:26:03 crc kubenswrapper[4791]: I1007 00:26:03.354540 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-2-build_0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f/manage-dockerfile/0.log" Oct 07 00:26:04 crc kubenswrapper[4791]: I1007 00:26:04.323059 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f","Type":"ContainerStarted","Data":"3c4751699725bb337f86d199c2d5c3c6d3df592d7c7a94a7673c5672ca1c6742"} Oct 07 00:26:04 crc 
kubenswrapper[4791]: I1007 00:26:04.369159 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-core-2-build" podStartSLOduration=5.369132902 podStartE2EDuration="5.369132902s" podCreationTimestamp="2025-10-07 00:25:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:26:04.367910147 +0000 UTC m=+890.963847808" watchObservedRunningTime="2025-10-07 00:26:04.369132902 +0000 UTC m=+890.965070573" Oct 07 00:26:11 crc kubenswrapper[4791]: I1007 00:26:11.600753 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:26:11 crc kubenswrapper[4791]: I1007 00:26:11.601435 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:26:41 crc kubenswrapper[4791]: I1007 00:26:41.601143 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:26:41 crc kubenswrapper[4791]: I1007 00:26:41.601839 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:26:41 crc kubenswrapper[4791]: I1007 00:26:41.601902 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:26:41 crc kubenswrapper[4791]: I1007 00:26:41.602905 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7cfedf539256dfacf0cf2c9e6ad1df35a6f94ed8695670a64b90aaad70e4d317"} pod="openshift-machine-config-operator/machine-config-daemon-h728c" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 00:26:41 crc kubenswrapper[4791]: I1007 00:26:41.603003 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" containerID="cri-o://7cfedf539256dfacf0cf2c9e6ad1df35a6f94ed8695670a64b90aaad70e4d317" gracePeriod=600 Oct 07 00:26:42 crc kubenswrapper[4791]: I1007 00:26:42.610720 4791 generic.go:334] "Generic (PLEG): container finished" podID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerID="7cfedf539256dfacf0cf2c9e6ad1df35a6f94ed8695670a64b90aaad70e4d317" exitCode=0 Oct 07 00:26:42 crc kubenswrapper[4791]: I1007 00:26:42.610802 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" 
event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerDied","Data":"7cfedf539256dfacf0cf2c9e6ad1df35a6f94ed8695670a64b90aaad70e4d317"} Oct 07 00:26:42 crc kubenswrapper[4791]: I1007 00:26:42.611236 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"707f6b1f578a3829a964bf21bc15d9b1043ee4b1415c1be3d0c64a4ecaf4fa34"} Oct 07 00:26:42 crc kubenswrapper[4791]: I1007 00:26:42.611265 4791 scope.go:117] "RemoveContainer" containerID="37631d119a9543621b7ec3462a6ba16de3d5a41d64a99b73d2268d35fabb5173" Oct 07 00:29:02 crc kubenswrapper[4791]: I1007 00:29:02.591991 4791 generic.go:334] "Generic (PLEG): container finished" podID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerID="3c4751699725bb337f86d199c2d5c3c6d3df592d7c7a94a7673c5672ca1c6742" exitCode=0 Oct 07 00:29:02 crc kubenswrapper[4791]: I1007 00:29:02.592132 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f","Type":"ContainerDied","Data":"3c4751699725bb337f86d199c2d5c3c6d3df592d7c7a94a7673c5672ca1c6742"} Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.847276 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982093 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-pull\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982159 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildworkdir\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982234 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-ca-bundles\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982263 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildcachedir\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982288 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-928l4\" (UniqueName: \"kubernetes.io/projected/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-kube-api-access-928l4\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982339 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-blob-cache\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: 
\"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982379 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-system-configs\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982416 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-push\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982440 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-run\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982463 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-root\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982489 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-proxy-ca-bundles\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982507 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-node-pullsecrets\") pod \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\" (UID: \"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f\") " Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982787 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.982873 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.984513 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.984544 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.985872 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.990151 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.990306 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.991059 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:29:03 crc kubenswrapper[4791]: I1007 00:29:03.995005 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-kube-api-access-928l4" (OuterVolumeSpecName: "kube-api-access-928l4") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "kube-api-access-928l4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.006149 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084461 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084499 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084509 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-928l4\" (UniqueName: \"kubernetes.io/projected/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-kube-api-access-928l4\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084521 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084531 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084543 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084551 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084560 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084569 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.084578 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.386684 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.389507 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.610488 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f","Type":"ContainerDied","Data":"97df901c9040863097d95d02732b978e2c43cefd39e923bbdd2e218914d76896"} Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.610544 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97df901c9040863097d95d02732b978e2c43cefd39e923bbdd2e218914d76896" Oct 07 00:29:04 crc kubenswrapper[4791]: I1007 00:29:04.610568 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Oct 07 00:29:06 crc kubenswrapper[4791]: I1007 00:29:06.483352 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" (UID: "0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:29:06 crc kubenswrapper[4791]: I1007 00:29:06.523687 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.026589 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 07 00:29:09 crc kubenswrapper[4791]: E1007 00:29:09.027322 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerName="git-clone" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.027341 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerName="git-clone" Oct 07 00:29:09 crc kubenswrapper[4791]: E1007 00:29:09.027362 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerName="manage-dockerfile" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.027370 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerName="manage-dockerfile" Oct 07 00:29:09 crc kubenswrapper[4791]: E1007 00:29:09.027390 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerName="docker-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.027398 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerName="docker-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.027541 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e2c0f3d-ee8c-4696-8e9c-dc34da6d5c5f" containerName="docker-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.028373 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.031012 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-sys-config" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.031163 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.031012 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-ca" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.031870 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-global-ca" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.042003 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.159505 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwwq9\" (UniqueName: \"kubernetes.io/projected/c5d659d9-b7c7-443b-8914-d533e16f750f-kube-api-access-nwwq9\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.159553 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-pull\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.159586 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.159606 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.159628 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.159786 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.159854 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.159925 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-push\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.160027 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.160128 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.160154 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.160245 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.261715 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.262041 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwwq9\" (UniqueName: \"kubernetes.io/projected/c5d659d9-b7c7-443b-8914-d533e16f750f-kube-api-access-nwwq9\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.262494 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-pull\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.262600 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.262673 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.262751 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.263011 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.263089 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.263168 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-push\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.263279 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.264057 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.264149 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.264198 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: 
\"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.264021 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.264244 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.261823 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.264058 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.263617 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.263983 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.264507 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.265373 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.269738 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-pull\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " 
pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.270727 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-push\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.280387 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwwq9\" (UniqueName: \"kubernetes.io/projected/c5d659d9-b7c7-443b-8914-d533e16f750f-kube-api-access-nwwq9\") pod \"sg-bridge-1-build\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.353118 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.547589 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 07 00:29:09 crc kubenswrapper[4791]: I1007 00:29:09.648001 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c5d659d9-b7c7-443b-8914-d533e16f750f","Type":"ContainerStarted","Data":"b0fc5d7e77a33b00c75d7717a6cfda54988239e4076bf52cc351316e1fbb8946"} Oct 07 00:29:10 crc kubenswrapper[4791]: I1007 00:29:10.657236 4791 generic.go:334] "Generic (PLEG): container finished" podID="c5d659d9-b7c7-443b-8914-d533e16f750f" containerID="81999fb1d0e5bdad043dd8ff05c313a90ea7c208a10e50490a651b23083c93d7" exitCode=0 Oct 07 00:29:10 crc kubenswrapper[4791]: I1007 00:29:10.657295 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c5d659d9-b7c7-443b-8914-d533e16f750f","Type":"ContainerDied","Data":"81999fb1d0e5bdad043dd8ff05c313a90ea7c208a10e50490a651b23083c93d7"} Oct 07 00:29:11 crc kubenswrapper[4791]: I1007 00:29:11.600819 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:29:11 crc kubenswrapper[4791]: I1007 00:29:11.601616 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:29:11 crc kubenswrapper[4791]: I1007 00:29:11.665686 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c5d659d9-b7c7-443b-8914-d533e16f750f","Type":"ContainerStarted","Data":"b62955ee651d0a12a61f29a6a95303cac46ef05377e1ec1f1f46e6b54eb6eaa0"} Oct 07 00:29:11 crc kubenswrapper[4791]: I1007 00:29:11.694977 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-bridge-1-build" podStartSLOduration=2.694943151 podStartE2EDuration="2.694943151s" podCreationTimestamp="2025-10-07 00:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:29:11.689169092 +0000 
UTC m=+1078.285106763" watchObservedRunningTime="2025-10-07 00:29:11.694943151 +0000 UTC m=+1078.290880802" Oct 07 00:29:17 crc kubenswrapper[4791]: I1007 00:29:17.705358 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_c5d659d9-b7c7-443b-8914-d533e16f750f/docker-build/0.log" Oct 07 00:29:17 crc kubenswrapper[4791]: I1007 00:29:17.706526 4791 generic.go:334] "Generic (PLEG): container finished" podID="c5d659d9-b7c7-443b-8914-d533e16f750f" containerID="b62955ee651d0a12a61f29a6a95303cac46ef05377e1ec1f1f46e6b54eb6eaa0" exitCode=1 Oct 07 00:29:17 crc kubenswrapper[4791]: I1007 00:29:17.706570 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c5d659d9-b7c7-443b-8914-d533e16f750f","Type":"ContainerDied","Data":"b62955ee651d0a12a61f29a6a95303cac46ef05377e1ec1f1f46e6b54eb6eaa0"} Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.030093 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_c5d659d9-b7c7-443b-8914-d533e16f750f/docker-build/0.log" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.030994 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.108332 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-node-pullsecrets\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.108667 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-run\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.108463 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.108747 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-pull\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.108786 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-root\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.108818 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-system-configs\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.108855 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-buildworkdir\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.108898 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-push\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.109482 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.109591 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-build-blob-cache\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.109735 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.110330 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). 
InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.109627 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwwq9\" (UniqueName: \"kubernetes.io/projected/c5d659d9-b7c7-443b-8914-d533e16f750f-kube-api-access-nwwq9\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.110706 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-buildcachedir\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.110768 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-proxy-ca-bundles\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.110804 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-ca-bundles\") pod \"c5d659d9-b7c7-443b-8914-d533e16f750f\" (UID: \"c5d659d9-b7c7-443b-8914-d533e16f750f\") " Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.111178 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.111209 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.111229 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.111246 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.111637 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.112062 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.112181 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.114151 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.116083 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.119562 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5d659d9-b7c7-443b-8914-d533e16f750f-kube-api-access-nwwq9" (OuterVolumeSpecName: "kube-api-access-nwwq9") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "kube-api-access-nwwq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.181227 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.212799 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.212839 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwwq9\" (UniqueName: \"kubernetes.io/projected/c5d659d9-b7c7-443b-8914-d533e16f750f-kube-api-access-nwwq9\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.212853 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c5d659d9-b7c7-443b-8914-d533e16f750f-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.212866 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.212878 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c5d659d9-b7c7-443b-8914-d533e16f750f-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.212894 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.212904 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/c5d659d9-b7c7-443b-8914-d533e16f750f-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.223914 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.228697 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.498998 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "c5d659d9-b7c7-443b-8914-d533e16f750f" (UID: "c5d659d9-b7c7-443b-8914-d533e16f750f"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.518077 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c5d659d9-b7c7-443b-8914-d533e16f750f-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.719217 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_c5d659d9-b7c7-443b-8914-d533e16f750f/docker-build/0.log" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.719758 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0fc5d7e77a33b00c75d7717a6cfda54988239e4076bf52cc351316e1fbb8946" Oct 07 00:29:19 crc kubenswrapper[4791]: I1007 00:29:19.719789 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.085120 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5d659d9-b7c7-443b-8914-d533e16f750f" path="/var/lib/kubelet/pods/c5d659d9-b7c7-443b-8914-d533e16f750f/volumes" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.876742 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-bridge-2-build"] Oct 07 00:29:20 crc kubenswrapper[4791]: E1007 00:29:20.877078 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5d659d9-b7c7-443b-8914-d533e16f750f" containerName="docker-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.877099 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5d659d9-b7c7-443b-8914-d533e16f750f" containerName="docker-build" Oct 07 00:29:20 crc kubenswrapper[4791]: E1007 00:29:20.877132 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5d659d9-b7c7-443b-8914-d533e16f750f" containerName="manage-dockerfile" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.877142 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5d659d9-b7c7-443b-8914-d533e16f750f" containerName="manage-dockerfile" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.877285 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5d659d9-b7c7-443b-8914-d533e16f750f" containerName="docker-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.878492 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.884031 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.884363 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-global-ca" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.884519 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-ca" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.884877 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-sys-config" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.900896 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-2-build"] Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939043 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939130 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-pull\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939182 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939373 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939466 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rs9l\" (UniqueName: \"kubernetes.io/projected/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-kube-api-access-4rs9l\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939559 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939615 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939707 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939827 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-push\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.939934 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.940055 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:20 crc kubenswrapper[4791]: I1007 00:29:20.940120 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041689 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rs9l\" (UniqueName: \"kubernetes.io/projected/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-kube-api-access-4rs9l\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041764 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041800 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041836 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041864 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041893 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-push\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041920 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041945 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.041982 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.042010 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.042184 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.042224 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-pull\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.042280 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: 
\"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.042546 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.042595 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.042721 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.043358 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.043687 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.043841 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.044056 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.044184 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.048162 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-pull\") pod \"sg-bridge-2-build\" (UID: 
\"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.051938 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-push\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.059893 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rs9l\" (UniqueName: \"kubernetes.io/projected/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-kube-api-access-4rs9l\") pod \"sg-bridge-2-build\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.197544 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.641669 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-2-build"] Oct 07 00:29:21 crc kubenswrapper[4791]: I1007 00:29:21.743295 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"d0d6f50d-07ad-40e3-8a5a-0e4058150eee","Type":"ContainerStarted","Data":"371a74d324e7b68d44bd314f6f6e12ae8d0e23399ee48cb62d3ea8453fb641d5"} Oct 07 00:29:22 crc kubenswrapper[4791]: I1007 00:29:22.752127 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"d0d6f50d-07ad-40e3-8a5a-0e4058150eee","Type":"ContainerStarted","Data":"35b6194ad782ae330f4bac44d0dd65eb84f96de7279b63847af85459aa53e2c2"} Oct 07 00:29:23 crc kubenswrapper[4791]: I1007 00:29:23.762077 4791 generic.go:334] "Generic (PLEG): container finished" podID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerID="35b6194ad782ae330f4bac44d0dd65eb84f96de7279b63847af85459aa53e2c2" exitCode=0 Oct 07 00:29:23 crc kubenswrapper[4791]: I1007 00:29:23.762165 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"d0d6f50d-07ad-40e3-8a5a-0e4058150eee","Type":"ContainerDied","Data":"35b6194ad782ae330f4bac44d0dd65eb84f96de7279b63847af85459aa53e2c2"} Oct 07 00:29:24 crc kubenswrapper[4791]: I1007 00:29:24.771095 4791 generic.go:334] "Generic (PLEG): container finished" podID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerID="b06fb32cfa1a7ba9f1b57142208eb0d174f5e2cf568e9914612f0d7fe2250bcb" exitCode=0 Oct 07 00:29:24 crc kubenswrapper[4791]: I1007 00:29:24.771170 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"d0d6f50d-07ad-40e3-8a5a-0e4058150eee","Type":"ContainerDied","Data":"b06fb32cfa1a7ba9f1b57142208eb0d174f5e2cf568e9914612f0d7fe2250bcb"} Oct 07 00:29:24 crc kubenswrapper[4791]: I1007 00:29:24.820801 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-2-build_d0d6f50d-07ad-40e3-8a5a-0e4058150eee/manage-dockerfile/0.log" Oct 07 00:29:25 crc kubenswrapper[4791]: I1007 00:29:25.781707 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"d0d6f50d-07ad-40e3-8a5a-0e4058150eee","Type":"ContainerStarted","Data":"7ec5346aa3287b25ada99704bb9d629089fe711421fa5baf4efa3e9c56ce98de"} Oct 07 00:29:25 crc kubenswrapper[4791]: I1007 00:29:25.805472 
4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-bridge-2-build" podStartSLOduration=5.805454 podStartE2EDuration="5.805454s" podCreationTimestamp="2025-10-07 00:29:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:29:25.802492543 +0000 UTC m=+1092.398430194" watchObservedRunningTime="2025-10-07 00:29:25.805454 +0000 UTC m=+1092.401391651" Oct 07 00:29:41 crc kubenswrapper[4791]: I1007 00:29:41.600374 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:29:41 crc kubenswrapper[4791]: I1007 00:29:41.601762 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.139681 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r"] Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.141057 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.144138 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.144247 4791 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.188911 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r"] Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.220066 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-secret-volume\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.220268 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6sdp\" (UniqueName: \"kubernetes.io/projected/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-kube-api-access-q6sdp\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.220368 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-config-volume\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.321561 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6sdp\" (UniqueName: \"kubernetes.io/projected/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-kube-api-access-q6sdp\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.321648 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-config-volume\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.321674 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-secret-volume\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.322578 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-config-volume\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.328470 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-secret-volume\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.339609 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6sdp\" (UniqueName: \"kubernetes.io/projected/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-kube-api-access-q6sdp\") pod \"collect-profiles-29329950-2qs7r\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.463858 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:00 crc kubenswrapper[4791]: I1007 00:30:00.675685 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r"] Oct 07 00:30:01 crc kubenswrapper[4791]: I1007 00:30:01.065462 4791 generic.go:334] "Generic (PLEG): container finished" podID="5fee2272-0d47-483f-bf5d-7b0aeaddb52b" containerID="2ba246772fa02b2544ffde796b59cdbdbf64936a52e6218e1722ed58c10cd8c3" exitCode=0 Oct 07 00:30:01 crc kubenswrapper[4791]: I1007 00:30:01.065817 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" event={"ID":"5fee2272-0d47-483f-bf5d-7b0aeaddb52b","Type":"ContainerDied","Data":"2ba246772fa02b2544ffde796b59cdbdbf64936a52e6218e1722ed58c10cd8c3"} Oct 07 00:30:01 crc kubenswrapper[4791]: I1007 00:30:01.065857 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" event={"ID":"5fee2272-0d47-483f-bf5d-7b0aeaddb52b","Type":"ContainerStarted","Data":"4b730eae22f04f0c21de11d3ba7ed3910c53066a28a25c5d539c671a258db04c"} Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.266648 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.350605 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-secret-volume\") pod \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.350683 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-config-volume\") pod \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.350843 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6sdp\" (UniqueName: \"kubernetes.io/projected/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-kube-api-access-q6sdp\") pod \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\" (UID: \"5fee2272-0d47-483f-bf5d-7b0aeaddb52b\") " Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.351663 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-config-volume" (OuterVolumeSpecName: "config-volume") pod "5fee2272-0d47-483f-bf5d-7b0aeaddb52b" (UID: "5fee2272-0d47-483f-bf5d-7b0aeaddb52b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.355661 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5fee2272-0d47-483f-bf5d-7b0aeaddb52b" (UID: "5fee2272-0d47-483f-bf5d-7b0aeaddb52b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.370471 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-kube-api-access-q6sdp" (OuterVolumeSpecName: "kube-api-access-q6sdp") pod "5fee2272-0d47-483f-bf5d-7b0aeaddb52b" (UID: "5fee2272-0d47-483f-bf5d-7b0aeaddb52b"). InnerVolumeSpecName "kube-api-access-q6sdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.452718 4791 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.452755 4791 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:02 crc kubenswrapper[4791]: I1007 00:30:02.452766 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6sdp\" (UniqueName: \"kubernetes.io/projected/5fee2272-0d47-483f-bf5d-7b0aeaddb52b-kube-api-access-q6sdp\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:03 crc kubenswrapper[4791]: I1007 00:30:03.080148 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" event={"ID":"5fee2272-0d47-483f-bf5d-7b0aeaddb52b","Type":"ContainerDied","Data":"4b730eae22f04f0c21de11d3ba7ed3910c53066a28a25c5d539c671a258db04c"} Oct 07 00:30:03 crc kubenswrapper[4791]: I1007 00:30:03.080199 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b730eae22f04f0c21de11d3ba7ed3910c53066a28a25c5d539c671a258db04c" Oct 07 00:30:03 crc kubenswrapper[4791]: I1007 00:30:03.080202 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329950-2qs7r" Oct 07 00:30:08 crc kubenswrapper[4791]: I1007 00:30:08.113173 4791 generic.go:334] "Generic (PLEG): container finished" podID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerID="7ec5346aa3287b25ada99704bb9d629089fe711421fa5baf4efa3e9c56ce98de" exitCode=0 Oct 07 00:30:08 crc kubenswrapper[4791]: I1007 00:30:08.113238 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"d0d6f50d-07ad-40e3-8a5a-0e4058150eee","Type":"ContainerDied","Data":"7ec5346aa3287b25ada99704bb9d629089fe711421fa5baf4efa3e9c56ce98de"} Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.362493 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454023 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-node-pullsecrets\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454081 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildcachedir\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454163 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-root\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454152 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454207 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-proxy-ca-bundles\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454234 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-blob-cache\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454264 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454288 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-push\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454322 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-ca-bundles\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454350 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-run\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454374 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rs9l\" (UniqueName: \"kubernetes.io/projected/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-kube-api-access-4rs9l\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454391 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildworkdir\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454451 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-system-configs\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.454477 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-pull\") pod \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\" (UID: \"d0d6f50d-07ad-40e3-8a5a-0e4058150eee\") " Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455087 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455150 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455339 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455378 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455477 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455493 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455505 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455514 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.455814 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.460475 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.460764 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.461543 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-kube-api-access-4rs9l" (OuterVolumeSpecName: "kube-api-access-4rs9l") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "kube-api-access-4rs9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.557577 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.557632 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.557645 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.557656 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rs9l\" (UniqueName: \"kubernetes.io/projected/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-kube-api-access-4rs9l\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.557668 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.557680 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.587101 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:30:09 crc kubenswrapper[4791]: I1007 00:30:09.658907 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:10 crc kubenswrapper[4791]: I1007 00:30:10.121482 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "d0d6f50d-07ad-40e3-8a5a-0e4058150eee" (UID: "d0d6f50d-07ad-40e3-8a5a-0e4058150eee"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:30:10 crc kubenswrapper[4791]: I1007 00:30:10.133043 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"d0d6f50d-07ad-40e3-8a5a-0e4058150eee","Type":"ContainerDied","Data":"371a74d324e7b68d44bd314f6f6e12ae8d0e23399ee48cb62d3ea8453fb641d5"} Oct 07 00:30:10 crc kubenswrapper[4791]: I1007 00:30:10.133094 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="371a74d324e7b68d44bd314f6f6e12ae8d0e23399ee48cb62d3ea8453fb641d5" Oct 07 00:30:10 crc kubenswrapper[4791]: I1007 00:30:10.133211 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Oct 07 00:30:10 crc kubenswrapper[4791]: I1007 00:30:10.165237 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d0d6f50d-07ad-40e3-8a5a-0e4058150eee-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:11 crc kubenswrapper[4791]: I1007 00:30:11.601133 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:30:11 crc kubenswrapper[4791]: I1007 00:30:11.601229 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:30:11 crc kubenswrapper[4791]: I1007 00:30:11.601350 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:30:11 crc kubenswrapper[4791]: I1007 00:30:11.602180 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"707f6b1f578a3829a964bf21bc15d9b1043ee4b1415c1be3d0c64a4ecaf4fa34"} pod="openshift-machine-config-operator/machine-config-daemon-h728c" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 00:30:11 crc kubenswrapper[4791]: I1007 00:30:11.602274 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" containerID="cri-o://707f6b1f578a3829a964bf21bc15d9b1043ee4b1415c1be3d0c64a4ecaf4fa34" gracePeriod=600 Oct 07 00:30:12 crc kubenswrapper[4791]: I1007 00:30:12.154523 4791 generic.go:334] "Generic (PLEG): container finished" podID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerID="707f6b1f578a3829a964bf21bc15d9b1043ee4b1415c1be3d0c64a4ecaf4fa34" exitCode=0 Oct 07 00:30:12 crc kubenswrapper[4791]: I1007 00:30:12.154572 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerDied","Data":"707f6b1f578a3829a964bf21bc15d9b1043ee4b1415c1be3d0c64a4ecaf4fa34"} Oct 07 00:30:12 crc kubenswrapper[4791]: I1007 00:30:12.155554 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"186c564d61df70f559e1048abd8501416d9bf37bf9acf5cdce844554cae2f448"} Oct 07 00:30:12 crc kubenswrapper[4791]: I1007 00:30:12.155612 4791 scope.go:117] "RemoveContainer" containerID="7cfedf539256dfacf0cf2c9e6ad1df35a6f94ed8695670a64b90aaad70e4d317" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.289535 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 07 00:30:15 crc kubenswrapper[4791]: E1007 00:30:15.290617 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerName="manage-dockerfile" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.290634 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerName="manage-dockerfile" Oct 07 00:30:15 crc kubenswrapper[4791]: E1007 00:30:15.290646 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerName="git-clone" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.290657 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerName="git-clone" Oct 07 00:30:15 crc kubenswrapper[4791]: E1007 00:30:15.290674 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fee2272-0d47-483f-bf5d-7b0aeaddb52b" containerName="collect-profiles" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.290683 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fee2272-0d47-483f-bf5d-7b0aeaddb52b" containerName="collect-profiles" Oct 07 00:30:15 crc kubenswrapper[4791]: E1007 00:30:15.290695 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerName="docker-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.290701 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerName="docker-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.290820 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fee2272-0d47-483f-bf5d-7b0aeaddb52b" containerName="collect-profiles" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.290837 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d6f50d-07ad-40e3-8a5a-0e4058150eee" containerName="docker-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.291659 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.293945 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.294169 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-sys-config" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.294482 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-ca" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.294504 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-global-ca" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.305770 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.437950 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438006 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438054 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438071 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qqc9\" (UniqueName: \"kubernetes.io/projected/76d5751d-6330-4ae6-8450-4ee71168e10f-kube-api-access-2qqc9\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438090 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438118 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " 
pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438138 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438159 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438269 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438365 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438436 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.438538 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539366 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539418 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539435 4791 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539455 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539473 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539489 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539526 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539557 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539577 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539612 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539629 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qqc9\" (UniqueName: \"kubernetes.io/projected/76d5751d-6330-4ae6-8450-4ee71168e10f-kube-api-access-2qqc9\") pod 
\"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539645 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.539810 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.540181 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.540565 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.540612 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.540787 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.540927 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.541111 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.541459 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: 
\"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.541773 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.547857 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.548893 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.557142 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qqc9\" (UniqueName: \"kubernetes.io/projected/76d5751d-6330-4ae6-8450-4ee71168e10f-kube-api-access-2qqc9\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.613594 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:15 crc kubenswrapper[4791]: I1007 00:30:15.816243 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 07 00:30:15 crc kubenswrapper[4791]: W1007 00:30:15.827704 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76d5751d_6330_4ae6_8450_4ee71168e10f.slice/crio-5236d5d6b45b00da991d67805d150c8c5226e86e0ba3b3393988f2e703e5efed WatchSource:0}: Error finding container 5236d5d6b45b00da991d67805d150c8c5226e86e0ba3b3393988f2e703e5efed: Status 404 returned error can't find the container with id 5236d5d6b45b00da991d67805d150c8c5226e86e0ba3b3393988f2e703e5efed Oct 07 00:30:16 crc kubenswrapper[4791]: I1007 00:30:16.206318 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"76d5751d-6330-4ae6-8450-4ee71168e10f","Type":"ContainerStarted","Data":"3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4"} Oct 07 00:30:16 crc kubenswrapper[4791]: I1007 00:30:16.206392 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"76d5751d-6330-4ae6-8450-4ee71168e10f","Type":"ContainerStarted","Data":"5236d5d6b45b00da991d67805d150c8c5226e86e0ba3b3393988f2e703e5efed"} Oct 07 00:30:17 crc kubenswrapper[4791]: I1007 00:30:17.217163 4791 generic.go:334] "Generic (PLEG): container finished" podID="76d5751d-6330-4ae6-8450-4ee71168e10f" containerID="3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4" exitCode=0 Oct 07 00:30:17 crc kubenswrapper[4791]: I1007 00:30:17.217245 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"76d5751d-6330-4ae6-8450-4ee71168e10f","Type":"ContainerDied","Data":"3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4"} Oct 07 00:30:18 crc kubenswrapper[4791]: I1007 00:30:18.228607 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"76d5751d-6330-4ae6-8450-4ee71168e10f","Type":"ContainerStarted","Data":"287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552"} Oct 07 00:30:18 crc kubenswrapper[4791]: I1007 00:30:18.260765 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-webhook-snmp-1-build" podStartSLOduration=3.260728152 podStartE2EDuration="3.260728152s" podCreationTimestamp="2025-10-07 00:30:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:30:18.252076366 +0000 UTC m=+1144.848014017" watchObservedRunningTime="2025-10-07 00:30:18.260728152 +0000 UTC m=+1144.856665843" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.306774 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.311229 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/prometheus-webhook-snmp-1-build" podUID="76d5751d-6330-4ae6-8450-4ee71168e10f" containerName="docker-build" containerID="cri-o://287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552" gracePeriod=30 Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.671360 4791 
log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_76d5751d-6330-4ae6-8450-4ee71168e10f/docker-build/0.log" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.672144 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800187 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qqc9\" (UniqueName: \"kubernetes.io/projected/76d5751d-6330-4ae6-8450-4ee71168e10f-kube-api-access-2qqc9\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800287 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-push\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800392 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-ca-bundles\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800452 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-buildworkdir\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800510 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-pull\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800532 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-system-configs\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800549 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-node-pullsecrets\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800587 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-proxy-ca-bundles\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800638 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-root\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800680 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-build-blob-cache\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800723 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-run\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800756 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-buildcachedir\") pod \"76d5751d-6330-4ae6-8450-4ee71168e10f\" (UID: \"76d5751d-6330-4ae6-8450-4ee71168e10f\") " Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.800971 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.801027 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.801050 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.801788 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.801885 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "build-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.801949 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.802620 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.806767 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76d5751d-6330-4ae6-8450-4ee71168e10f-kube-api-access-2qqc9" (OuterVolumeSpecName: "kube-api-access-2qqc9") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "kube-api-access-2qqc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.806989 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.807510 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.857718 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903431 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903465 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qqc9\" (UniqueName: \"kubernetes.io/projected/76d5751d-6330-4ae6-8450-4ee71168e10f-kube-api-access-2qqc9\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903477 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903505 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903515 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903525 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/76d5751d-6330-4ae6-8450-4ee71168e10f-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903534 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903541 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/76d5751d-6330-4ae6-8450-4ee71168e10f-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903550 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/76d5751d-6330-4ae6-8450-4ee71168e10f-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903557 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:25 crc kubenswrapper[4791]: I1007 00:30:25.903586 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.108504 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "76d5751d-6330-4ae6-8450-4ee71168e10f" (UID: "76d5751d-6330-4ae6-8450-4ee71168e10f"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.207658 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/76d5751d-6330-4ae6-8450-4ee71168e10f-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.287286 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_76d5751d-6330-4ae6-8450-4ee71168e10f/docker-build/0.log" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.287843 4791 generic.go:334] "Generic (PLEG): container finished" podID="76d5751d-6330-4ae6-8450-4ee71168e10f" containerID="287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552" exitCode=1 Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.287888 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"76d5751d-6330-4ae6-8450-4ee71168e10f","Type":"ContainerDied","Data":"287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552"} Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.287927 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"76d5751d-6330-4ae6-8450-4ee71168e10f","Type":"ContainerDied","Data":"5236d5d6b45b00da991d67805d150c8c5226e86e0ba3b3393988f2e703e5efed"} Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.287951 4791 scope.go:117] "RemoveContainer" containerID="287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.287984 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.309195 4791 scope.go:117] "RemoveContainer" containerID="3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.325776 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.333152 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.345573 4791 scope.go:117] "RemoveContainer" containerID="287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552" Oct 07 00:30:26 crc kubenswrapper[4791]: E1007 00:30:26.346106 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552\": container with ID starting with 287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552 not found: ID does not exist" containerID="287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.346222 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552"} err="failed to get container status \"287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552\": rpc error: code = NotFound desc = could not find container \"287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552\": container with ID starting with 
287f6e1c68e248f7cc9abe6a3689229a547edcc5da03c6b6f0f6883bae64d552 not found: ID does not exist" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.346256 4791 scope.go:117] "RemoveContainer" containerID="3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4" Oct 07 00:30:26 crc kubenswrapper[4791]: E1007 00:30:26.346642 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4\": container with ID starting with 3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4 not found: ID does not exist" containerID="3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.346689 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4"} err="failed to get container status \"3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4\": rpc error: code = NotFound desc = could not find container \"3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4\": container with ID starting with 3517620dc1565c5a81555cfb31183f537ee19420855b18402b94ae38883571f4 not found: ID does not exist" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.972632 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Oct 07 00:30:26 crc kubenswrapper[4791]: E1007 00:30:26.972897 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76d5751d-6330-4ae6-8450-4ee71168e10f" containerName="docker-build" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.972911 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="76d5751d-6330-4ae6-8450-4ee71168e10f" containerName="docker-build" Oct 07 00:30:26 crc kubenswrapper[4791]: E1007 00:30:26.972930 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76d5751d-6330-4ae6-8450-4ee71168e10f" containerName="manage-dockerfile" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.972936 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="76d5751d-6330-4ae6-8450-4ee71168e10f" containerName="manage-dockerfile" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.973041 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="76d5751d-6330-4ae6-8450-4ee71168e10f" containerName="docker-build" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.974009 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.978477 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-sys-config" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.978514 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-ca" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.978699 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-global-ca" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.978755 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:30:26 crc kubenswrapper[4791]: I1007 00:30:26.990901 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120117 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120177 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120217 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120243 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120267 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120371 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " 
pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120519 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q7qm\" (UniqueName: \"kubernetes.io/projected/84c04cf5-13fd-4bbb-840a-b50b23a59673-kube-api-access-6q7qm\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120546 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120572 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120614 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120643 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.120701 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222100 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222160 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q7qm\" (UniqueName: \"kubernetes.io/projected/84c04cf5-13fd-4bbb-840a-b50b23a59673-kube-api-access-6q7qm\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222196 4791 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222225 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222272 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222295 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222326 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222359 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222386 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222429 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222454 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-blob-cache\") pod 
\"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222479 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222477 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.222623 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.223243 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.223277 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.223495 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.223323 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.223634 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.223703 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.223731 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.234024 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.234085 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.248708 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q7qm\" (UniqueName: \"kubernetes.io/projected/84c04cf5-13fd-4bbb-840a-b50b23a59673-kube-api-access-6q7qm\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.292628 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:30:27 crc kubenswrapper[4791]: I1007 00:30:27.555666 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Oct 07 00:30:28 crc kubenswrapper[4791]: I1007 00:30:28.079000 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76d5751d-6330-4ae6-8450-4ee71168e10f" path="/var/lib/kubelet/pods/76d5751d-6330-4ae6-8450-4ee71168e10f/volumes" Oct 07 00:30:28 crc kubenswrapper[4791]: I1007 00:30:28.308977 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"84c04cf5-13fd-4bbb-840a-b50b23a59673","Type":"ContainerStarted","Data":"b54a5dc8a57a2558ee4a0be0cff5d92364fa12496c357a8c38d79624a6749e6c"} Oct 07 00:30:28 crc kubenswrapper[4791]: I1007 00:30:28.309037 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"84c04cf5-13fd-4bbb-840a-b50b23a59673","Type":"ContainerStarted","Data":"43c20b3c5b800fe8a58c7f6b224fd277f231bf879571e895781dcfb4d85c1be2"} Oct 07 00:30:29 crc kubenswrapper[4791]: I1007 00:30:29.316726 4791 generic.go:334] "Generic (PLEG): container finished" podID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerID="b54a5dc8a57a2558ee4a0be0cff5d92364fa12496c357a8c38d79624a6749e6c" exitCode=0 Oct 07 00:30:29 crc kubenswrapper[4791]: I1007 00:30:29.316782 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"84c04cf5-13fd-4bbb-840a-b50b23a59673","Type":"ContainerDied","Data":"b54a5dc8a57a2558ee4a0be0cff5d92364fa12496c357a8c38d79624a6749e6c"} Oct 07 00:30:29 crc kubenswrapper[4791]: E1007 00:30:29.721771 4791 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84c04cf5_13fd_4bbb_840a_b50b23a59673.slice/crio-conmon-d24bd91fe756a32235a31b05d378c682ba0e4cda9f4bc3350e70c3c874178e05.scope\": RecentStats: unable to find data in memory cache]" Oct 07 00:30:30 crc kubenswrapper[4791]: I1007 00:30:30.325776 4791 generic.go:334] "Generic (PLEG): container finished" podID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerID="d24bd91fe756a32235a31b05d378c682ba0e4cda9f4bc3350e70c3c874178e05" exitCode=0 Oct 07 00:30:30 crc kubenswrapper[4791]: I1007 00:30:30.325892 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"84c04cf5-13fd-4bbb-840a-b50b23a59673","Type":"ContainerDied","Data":"d24bd91fe756a32235a31b05d378c682ba0e4cda9f4bc3350e70c3c874178e05"} Oct 07 00:30:30 crc kubenswrapper[4791]: I1007 00:30:30.371730 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-2-build_84c04cf5-13fd-4bbb-840a-b50b23a59673/manage-dockerfile/0.log" Oct 07 00:30:31 crc kubenswrapper[4791]: I1007 00:30:31.337922 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"84c04cf5-13fd-4bbb-840a-b50b23a59673","Type":"ContainerStarted","Data":"a6fa74f5ed887429402f8015b5de07ef0a0972b03db68fef22b52b7334b7ab7a"} Oct 07 00:30:31 crc kubenswrapper[4791]: I1007 00:30:31.365677 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-webhook-snmp-2-build" podStartSLOduration=5.365652319 
podStartE2EDuration="5.365652319s" podCreationTimestamp="2025-10-07 00:30:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:30:31.361708962 +0000 UTC m=+1157.957646613" watchObservedRunningTime="2025-10-07 00:30:31.365652319 +0000 UTC m=+1157.961589980" Oct 07 00:31:25 crc kubenswrapper[4791]: I1007 00:31:25.723231 4791 generic.go:334] "Generic (PLEG): container finished" podID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerID="a6fa74f5ed887429402f8015b5de07ef0a0972b03db68fef22b52b7334b7ab7a" exitCode=0 Oct 07 00:31:25 crc kubenswrapper[4791]: I1007 00:31:25.723313 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"84c04cf5-13fd-4bbb-840a-b50b23a59673","Type":"ContainerDied","Data":"a6fa74f5ed887429402f8015b5de07ef0a0972b03db68fef22b52b7334b7ab7a"} Oct 07 00:31:26 crc kubenswrapper[4791]: I1007 00:31:26.967383 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.127919 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-pull\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128024 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q7qm\" (UniqueName: \"kubernetes.io/projected/84c04cf5-13fd-4bbb-840a-b50b23a59673-kube-api-access-6q7qm\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128290 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-ca-bundles\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128322 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-node-pullsecrets\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128356 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-proxy-ca-bundles\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128452 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-system-configs\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128464 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128487 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-root\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128598 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildcachedir\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128638 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-run\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128702 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-push\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128734 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-blob-cache\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128715 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.128765 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildworkdir\") pod \"84c04cf5-13fd-4bbb-840a-b50b23a59673\" (UID: \"84c04cf5-13fd-4bbb-840a-b50b23a59673\") " Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.129271 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "build-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.129288 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.129446 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.129461 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.129472 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.129483 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.129501 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.130623 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.130784 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.135100 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.135598 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.139799 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84c04cf5-13fd-4bbb-840a-b50b23a59673-kube-api-access-6q7qm" (OuterVolumeSpecName: "kube-api-access-6q7qm") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "kube-api-access-6q7qm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.225257 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.231046 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.231087 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.231106 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q7qm\" (UniqueName: \"kubernetes.io/projected/84c04cf5-13fd-4bbb-840a-b50b23a59673-kube-api-access-6q7qm\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.231119 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.231130 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.231142 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/84c04cf5-13fd-4bbb-840a-b50b23a59673-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.231153 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.739575 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"84c04cf5-13fd-4bbb-840a-b50b23a59673","Type":"ContainerDied","Data":"43c20b3c5b800fe8a58c7f6b224fd277f231bf879571e895781dcfb4d85c1be2"} Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.739626 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43c20b3c5b800fe8a58c7f6b224fd277f231bf879571e895781dcfb4d85c1be2" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.739680 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.907862 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "84c04cf5-13fd-4bbb-840a-b50b23a59673" (UID: "84c04cf5-13fd-4bbb-840a-b50b23a59673"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:27 crc kubenswrapper[4791]: I1007 00:31:27.941027 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/84c04cf5-13fd-4bbb-840a-b50b23a59673-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.687950 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 07 00:31:36 crc kubenswrapper[4791]: E1007 00:31:36.688678 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerName="git-clone" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.688692 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerName="git-clone" Oct 07 00:31:36 crc kubenswrapper[4791]: E1007 00:31:36.688704 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerName="docker-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.688711 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerName="docker-build" Oct 07 00:31:36 crc kubenswrapper[4791]: E1007 00:31:36.688720 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerName="manage-dockerfile" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.688726 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerName="manage-dockerfile" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.688853 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="84c04cf5-13fd-4bbb-840a-b50b23a59673" containerName="docker-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.689525 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.691597 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-sys-config" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.692029 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-global-ca" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.692291 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-ca" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.692529 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.702737 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.867983 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868436 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868473 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868505 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hdjf\" (UniqueName: \"kubernetes.io/projected/44051cd0-3075-421c-8de3-1a8b1a91517c-kube-api-access-2hdjf\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868534 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868557 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: 
\"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868738 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868804 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868910 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.868944 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-node-pullsecrets\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.869059 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.869089 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969517 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969592 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969616 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969638 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969666 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hdjf\" (UniqueName: \"kubernetes.io/projected/44051cd0-3075-421c-8de3-1a8b1a91517c-kube-api-access-2hdjf\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969687 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969707 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969744 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969762 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969779 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: 
\"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969795 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-node-pullsecrets\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969904 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.969994 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.970161 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-node-pullsecrets\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.970339 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.970522 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.970558 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.970519 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: 
\"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.970780 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.970822 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.971295 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.975593 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.976305 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:36 crc kubenswrapper[4791]: I1007 00:31:36.991274 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hdjf\" (UniqueName: \"kubernetes.io/projected/44051cd0-3075-421c-8de3-1a8b1a91517c-kube-api-access-2hdjf\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:37 crc kubenswrapper[4791]: I1007 00:31:37.013737 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:37 crc kubenswrapper[4791]: I1007 00:31:37.208975 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 07 00:31:37 crc kubenswrapper[4791]: I1007 00:31:37.825721 4791 generic.go:334] "Generic (PLEG): container finished" podID="44051cd0-3075-421c-8de3-1a8b1a91517c" containerID="2498ce595b0b891992c65424fa19fdc78dd589db585bbb1de2fdc50a2a1bae61" exitCode=0 Oct 07 00:31:37 crc kubenswrapper[4791]: I1007 00:31:37.825779 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"44051cd0-3075-421c-8de3-1a8b1a91517c","Type":"ContainerDied","Data":"2498ce595b0b891992c65424fa19fdc78dd589db585bbb1de2fdc50a2a1bae61"} Oct 07 00:31:37 crc kubenswrapper[4791]: I1007 00:31:37.825813 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"44051cd0-3075-421c-8de3-1a8b1a91517c","Type":"ContainerStarted","Data":"c50bb458566fb77abf0d1ebee45d7519de9723ac05925cc0e4058bd51ebe5700"} Oct 07 00:31:38 crc kubenswrapper[4791]: I1007 00:31:38.837739 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_44051cd0-3075-421c-8de3-1a8b1a91517c/docker-build/0.log" Oct 07 00:31:38 crc kubenswrapper[4791]: I1007 00:31:38.838947 4791 generic.go:334] "Generic (PLEG): container finished" podID="44051cd0-3075-421c-8de3-1a8b1a91517c" containerID="153503d16ea3f3f23e80bcbb95564360ba5f8d1d798ba4658aea9e7e7d146d24" exitCode=1 Oct 07 00:31:38 crc kubenswrapper[4791]: I1007 00:31:38.839024 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"44051cd0-3075-421c-8de3-1a8b1a91517c","Type":"ContainerDied","Data":"153503d16ea3f3f23e80bcbb95564360ba5f8d1d798ba4658aea9e7e7d146d24"} Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.150463 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_44051cd0-3075-421c-8de3-1a8b1a91517c/docker-build/0.log" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.151729 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.321460 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-node-pullsecrets\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.321766 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-system-configs\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.321581 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.322522 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.322675 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-buildworkdir\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.322761 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-proxy-ca-bundles\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.322855 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-pull\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.322978 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hdjf\" (UniqueName: \"kubernetes.io/projected/44051cd0-3075-421c-8de3-1a8b1a91517c-kube-api-access-2hdjf\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323098 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-run\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: 
\"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323236 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-ca-bundles\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323358 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-push\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323513 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-root\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323625 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-buildcachedir\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323737 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-build-blob-cache\") pod \"44051cd0-3075-421c-8de3-1a8b1a91517c\" (UID: \"44051cd0-3075-421c-8de3-1a8b1a91517c\") " Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323121 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323203 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323730 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.323810 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). 
InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324142 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324461 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324609 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324522 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324675 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/44051cd0-3075-421c-8de3-1a8b1a91517c-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324701 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324718 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324730 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.324742 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/44051cd0-3075-421c-8de3-1a8b1a91517c-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.325860 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.328673 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.328711 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44051cd0-3075-421c-8de3-1a8b1a91517c-kube-api-access-2hdjf" (OuterVolumeSpecName: "kube-api-access-2hdjf") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "kube-api-access-2hdjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.328924 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "44051cd0-3075-421c-8de3-1a8b1a91517c" (UID: "44051cd0-3075-421c-8de3-1a8b1a91517c"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.426046 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.426078 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hdjf\" (UniqueName: \"kubernetes.io/projected/44051cd0-3075-421c-8de3-1a8b1a91517c-kube-api-access-2hdjf\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.426088 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.426097 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/44051cd0-3075-421c-8de3-1a8b1a91517c-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.426107 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/44051cd0-3075-421c-8de3-1a8b1a91517c-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.855169 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_44051cd0-3075-421c-8de3-1a8b1a91517c/docker-build/0.log" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.856119 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"44051cd0-3075-421c-8de3-1a8b1a91517c","Type":"ContainerDied","Data":"c50bb458566fb77abf0d1ebee45d7519de9723ac05925cc0e4058bd51ebe5700"} Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.856169 4791 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c50bb458566fb77abf0d1ebee45d7519de9723ac05925cc0e4058bd51ebe5700" Oct 07 00:31:40 crc kubenswrapper[4791]: I1007 00:31:40.856198 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 07 00:31:47 crc kubenswrapper[4791]: I1007 00:31:47.201781 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 07 00:31:47 crc kubenswrapper[4791]: I1007 00:31:47.207508 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 07 00:31:48 crc kubenswrapper[4791]: I1007 00:31:48.088397 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44051cd0-3075-421c-8de3-1a8b1a91517c" path="/var/lib/kubelet/pods/44051cd0-3075-421c-8de3-1a8b1a91517c/volumes" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.006472 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Oct 07 00:31:49 crc kubenswrapper[4791]: E1007 00:31:49.006737 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44051cd0-3075-421c-8de3-1a8b1a91517c" containerName="manage-dockerfile" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.006752 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="44051cd0-3075-421c-8de3-1a8b1a91517c" containerName="manage-dockerfile" Oct 07 00:31:49 crc kubenswrapper[4791]: E1007 00:31:49.006766 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44051cd0-3075-421c-8de3-1a8b1a91517c" containerName="docker-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.006788 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="44051cd0-3075-421c-8de3-1a8b1a91517c" containerName="docker-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.006896 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="44051cd0-3075-421c-8de3-1a8b1a91517c" containerName="docker-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.007808 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.010863 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.011147 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-global-ca" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.011306 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-ca" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.011529 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-sys-config" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.037464 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146070 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146152 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146183 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146255 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146286 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146378 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbhh2\" (UniqueName: 
\"kubernetes.io/projected/b376f572-5535-44cb-94a5-a8ec8e9606f8-kube-api-access-rbhh2\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146521 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146591 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146656 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-root\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146729 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146768 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.146795 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.247655 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.247737 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.247790 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.247813 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.247835 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.247871 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbhh2\" (UniqueName: \"kubernetes.io/projected/b376f572-5535-44cb-94a5-a8ec8e9606f8-kube-api-access-rbhh2\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.247891 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.248133 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.248614 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.248733 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-root\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.248799 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.248835 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.248871 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.249032 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.249145 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.249207 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.249157 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.249584 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-root\") pod 
\"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.249620 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.249946 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.250389 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.255537 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.255592 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.272018 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbhh2\" (UniqueName: \"kubernetes.io/projected/b376f572-5535-44cb-94a5-a8ec8e9606f8-kube-api-access-rbhh2\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.335708 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.546856 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.924027 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"b376f572-5535-44cb-94a5-a8ec8e9606f8","Type":"ContainerStarted","Data":"794a5e669f2d24d306fc6f1081547019f86bd4a9c55889923cd67a1854274edc"} Oct 07 00:31:49 crc kubenswrapper[4791]: I1007 00:31:49.924314 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"b376f572-5535-44cb-94a5-a8ec8e9606f8","Type":"ContainerStarted","Data":"13314b226f412bbb37f8ebeca60f50ba222d55b6e710b6d840c5e16522868c39"} Oct 07 00:31:50 crc kubenswrapper[4791]: E1007 00:31:50.020416 4791 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.217:38354->38.102.83.217:44177: read tcp 38.102.83.217:38354->38.102.83.217:44177: read: connection reset by peer Oct 07 00:31:50 crc kubenswrapper[4791]: I1007 00:31:50.947492 4791 generic.go:334] "Generic (PLEG): container finished" podID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerID="794a5e669f2d24d306fc6f1081547019f86bd4a9c55889923cd67a1854274edc" exitCode=0 Oct 07 00:31:50 crc kubenswrapper[4791]: I1007 00:31:50.947541 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"b376f572-5535-44cb-94a5-a8ec8e9606f8","Type":"ContainerDied","Data":"794a5e669f2d24d306fc6f1081547019f86bd4a9c55889923cd67a1854274edc"} Oct 07 00:31:51 crc kubenswrapper[4791]: I1007 00:31:51.956597 4791 generic.go:334] "Generic (PLEG): container finished" podID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerID="467dff72b269780dd918cecded8da59b42a1320999e4303b9a7918ce32ade695" exitCode=0 Oct 07 00:31:51 crc kubenswrapper[4791]: I1007 00:31:51.956698 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"b376f572-5535-44cb-94a5-a8ec8e9606f8","Type":"ContainerDied","Data":"467dff72b269780dd918cecded8da59b42a1320999e4303b9a7918ce32ade695"} Oct 07 00:31:51 crc kubenswrapper[4791]: I1007 00:31:51.991066 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-2-build_b376f572-5535-44cb-94a5-a8ec8e9606f8/manage-dockerfile/0.log" Oct 07 00:31:52 crc kubenswrapper[4791]: I1007 00:31:52.969724 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"b376f572-5535-44cb-94a5-a8ec8e9606f8","Type":"ContainerStarted","Data":"861f4eed8cd04d4954a9693fda2e17576b7155917116c8d86d45579b11aabe66"} Oct 07 00:31:53 crc kubenswrapper[4791]: I1007 00:31:53.014655 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-bundle-2-build" podStartSLOduration=5.014631731 podStartE2EDuration="5.014631731s" podCreationTimestamp="2025-10-07 00:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:31:53.007531396 +0000 UTC m=+1239.603469047" watchObservedRunningTime="2025-10-07 00:31:53.014631731 +0000 UTC 
m=+1239.610569382" Oct 07 00:31:55 crc kubenswrapper[4791]: I1007 00:31:55.992788 4791 generic.go:334] "Generic (PLEG): container finished" podID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerID="861f4eed8cd04d4954a9693fda2e17576b7155917116c8d86d45579b11aabe66" exitCode=0 Oct 07 00:31:55 crc kubenswrapper[4791]: I1007 00:31:55.992843 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"b376f572-5535-44cb-94a5-a8ec8e9606f8","Type":"ContainerDied","Data":"861f4eed8cd04d4954a9693fda2e17576b7155917116c8d86d45579b11aabe66"} Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.283538 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469188 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-pull\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469611 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-proxy-ca-bundles\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469636 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-node-pullsecrets\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469660 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildcachedir\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469692 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-run\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469728 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-push\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469755 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469816 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469878 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-system-configs\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469932 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-blob-cache\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.469987 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-root\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.470026 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-ca-bundles\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.470056 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbhh2\" (UniqueName: \"kubernetes.io/projected/b376f572-5535-44cb-94a5-a8ec8e9606f8-kube-api-access-rbhh2\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.470088 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildworkdir\") pod \"b376f572-5535-44cb-94a5-a8ec8e9606f8\" (UID: \"b376f572-5535-44cb-94a5-a8ec8e9606f8\") " Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.470317 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.470334 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.470728 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: 
"b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.471452 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.471469 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.472088 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.472258 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.472676 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.476131 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.477161 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.478636 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.478691 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b376f572-5535-44cb-94a5-a8ec8e9606f8-kube-api-access-rbhh2" (OuterVolumeSpecName: "kube-api-access-rbhh2") pod "b376f572-5535-44cb-94a5-a8ec8e9606f8" (UID: "b376f572-5535-44cb-94a5-a8ec8e9606f8"). InnerVolumeSpecName "kube-api-access-rbhh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571226 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571280 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbhh2\" (UniqueName: \"kubernetes.io/projected/b376f572-5535-44cb-94a5-a8ec8e9606f8-kube-api-access-rbhh2\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571297 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571307 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571318 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571328 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571339 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b376f572-5535-44cb-94a5-a8ec8e9606f8-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571353 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc kubenswrapper[4791]: I1007 00:31:57.571365 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:57 crc 
kubenswrapper[4791]: I1007 00:31:57.571374 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b376f572-5535-44cb-94a5-a8ec8e9606f8-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:31:58 crc kubenswrapper[4791]: I1007 00:31:58.009533 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"b376f572-5535-44cb-94a5-a8ec8e9606f8","Type":"ContainerDied","Data":"13314b226f412bbb37f8ebeca60f50ba222d55b6e710b6d840c5e16522868c39"} Oct 07 00:31:58 crc kubenswrapper[4791]: I1007 00:31:58.009587 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13314b226f412bbb37f8ebeca60f50ba222d55b6e710b6d840c5e16522868c39" Oct 07 00:31:58 crc kubenswrapper[4791]: I1007 00:31:58.009591 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.062723 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 07 00:32:02 crc kubenswrapper[4791]: E1007 00:32:02.063240 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerName="manage-dockerfile" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.063255 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerName="manage-dockerfile" Oct 07 00:32:02 crc kubenswrapper[4791]: E1007 00:32:02.063266 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerName="docker-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.063272 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerName="docker-build" Oct 07 00:32:02 crc kubenswrapper[4791]: E1007 00:32:02.063284 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerName="git-clone" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.063290 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerName="git-clone" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.063417 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="b376f572-5535-44cb-94a5-a8ec8e9606f8" containerName="docker-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.064170 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.066544 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-global-ca" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.066548 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-ca" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.067240 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-sys-config" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.071118 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.085090 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237320 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ds7n\" (UniqueName: \"kubernetes.io/projected/b15e2345-6ecf-405e-98ce-ab95cf678eb6-kube-api-access-7ds7n\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237380 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237427 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237458 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237636 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237682 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-ca-bundles\") 
pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237809 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237873 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-node-pullsecrets\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237898 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237926 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237951 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-root\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.237978 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.339820 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.339979 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-pull\") pod 
\"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340035 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340147 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340210 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-node-pullsecrets\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340242 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340278 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340315 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-root\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340360 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340506 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ds7n\" (UniqueName: \"kubernetes.io/projected/b15e2345-6ecf-405e-98ce-ab95cf678eb6-kube-api-access-7ds7n\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 
00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340553 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340953 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-root\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340849 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340960 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.341080 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.340726 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-node-pullsecrets\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.341285 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.341598 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.341651 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.342653 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.342990 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.344811 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.345255 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.369943 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ds7n\" (UniqueName: \"kubernetes.io/projected/b15e2345-6ecf-405e-98ce-ab95cf678eb6-kube-api-access-7ds7n\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.380510 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:02 crc kubenswrapper[4791]: I1007 00:32:02.600530 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 07 00:32:03 crc kubenswrapper[4791]: I1007 00:32:03.048296 4791 generic.go:334] "Generic (PLEG): container finished" podID="b15e2345-6ecf-405e-98ce-ab95cf678eb6" containerID="047d0ece5b78eb3627ceec5b6488fbd81d7f26da9e9555a94e57c4a1231f0cca" exitCode=0 Oct 07 00:32:03 crc kubenswrapper[4791]: I1007 00:32:03.048349 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"b15e2345-6ecf-405e-98ce-ab95cf678eb6","Type":"ContainerDied","Data":"047d0ece5b78eb3627ceec5b6488fbd81d7f26da9e9555a94e57c4a1231f0cca"} Oct 07 00:32:03 crc kubenswrapper[4791]: I1007 00:32:03.048997 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"b15e2345-6ecf-405e-98ce-ab95cf678eb6","Type":"ContainerStarted","Data":"392db25a2723ca11e9d7350dc18812f280f2e1f9dbb8349c52282231aedd967d"} Oct 07 00:32:04 crc kubenswrapper[4791]: I1007 00:32:04.064043 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_b15e2345-6ecf-405e-98ce-ab95cf678eb6/docker-build/0.log" Oct 07 00:32:04 crc kubenswrapper[4791]: I1007 00:32:04.065198 4791 generic.go:334] "Generic (PLEG): container finished" podID="b15e2345-6ecf-405e-98ce-ab95cf678eb6" containerID="d1bb4f8382376ee4ba1579aed38fb3a4f8a87ad2a1ef5f6be9b901b04d81ade3" exitCode=1 Oct 07 00:32:04 crc kubenswrapper[4791]: I1007 00:32:04.065246 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"b15e2345-6ecf-405e-98ce-ab95cf678eb6","Type":"ContainerDied","Data":"d1bb4f8382376ee4ba1579aed38fb3a4f8a87ad2a1ef5f6be9b901b04d81ade3"} Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.304256 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_b15e2345-6ecf-405e-98ce-ab95cf678eb6/docker-build/0.log" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.305060 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489282 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildworkdir\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489354 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-system-configs\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489384 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-proxy-ca-bundles\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489485 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildcachedir\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489532 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-root\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489552 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-run\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489573 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-pull\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489599 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-node-pullsecrets\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489654 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-blob-cache\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489687 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: 
\"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-push\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489758 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ds7n\" (UniqueName: \"kubernetes.io/projected/b15e2345-6ecf-405e-98ce-ab95cf678eb6-kube-api-access-7ds7n\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489761 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489901 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489919 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-ca-bundles\") pod \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\" (UID: \"b15e2345-6ecf-405e-98ce-ab95cf678eb6\") " Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.489952 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.490330 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.490471 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.490541 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.490557 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.490569 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b15e2345-6ecf-405e-98ce-ab95cf678eb6-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.490813 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.493072 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.493178 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.493389 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.493467 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.498292 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.498312 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.498607 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b15e2345-6ecf-405e-98ce-ab95cf678eb6-kube-api-access-7ds7n" (OuterVolumeSpecName: "kube-api-access-7ds7n") pod "b15e2345-6ecf-405e-98ce-ab95cf678eb6" (UID: "b15e2345-6ecf-405e-98ce-ab95cf678eb6"). InnerVolumeSpecName "kube-api-access-7ds7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.592260 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.592300 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.592315 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.592324 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.592333 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.592341 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b15e2345-6ecf-405e-98ce-ab95cf678eb6-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.592349 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/b15e2345-6ecf-405e-98ce-ab95cf678eb6-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:05 crc kubenswrapper[4791]: I1007 00:32:05.592357 4791 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-7ds7n\" (UniqueName: \"kubernetes.io/projected/b15e2345-6ecf-405e-98ce-ab95cf678eb6-kube-api-access-7ds7n\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:06 crc kubenswrapper[4791]: I1007 00:32:06.083187 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_b15e2345-6ecf-405e-98ce-ab95cf678eb6/docker-build/0.log" Oct 07 00:32:06 crc kubenswrapper[4791]: I1007 00:32:06.083820 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"b15e2345-6ecf-405e-98ce-ab95cf678eb6","Type":"ContainerDied","Data":"392db25a2723ca11e9d7350dc18812f280f2e1f9dbb8349c52282231aedd967d"} Oct 07 00:32:06 crc kubenswrapper[4791]: I1007 00:32:06.083868 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="392db25a2723ca11e9d7350dc18812f280f2e1f9dbb8349c52282231aedd967d" Oct 07 00:32:06 crc kubenswrapper[4791]: I1007 00:32:06.083925 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 07 00:32:11 crc kubenswrapper[4791]: I1007 00:32:11.600670 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:32:11 crc kubenswrapper[4791]: I1007 00:32:11.601102 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:32:12 crc kubenswrapper[4791]: I1007 00:32:12.585958 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 07 00:32:12 crc kubenswrapper[4791]: I1007 00:32:12.591740 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.081241 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b15e2345-6ecf-405e-98ce-ab95cf678eb6" path="/var/lib/kubelet/pods/b15e2345-6ecf-405e-98ce-ab95cf678eb6/volumes" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.192453 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Oct 07 00:32:14 crc kubenswrapper[4791]: E1007 00:32:14.192764 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b15e2345-6ecf-405e-98ce-ab95cf678eb6" containerName="docker-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.192780 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b15e2345-6ecf-405e-98ce-ab95cf678eb6" containerName="docker-build" Oct 07 00:32:14 crc kubenswrapper[4791]: E1007 00:32:14.192800 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b15e2345-6ecf-405e-98ce-ab95cf678eb6" containerName="manage-dockerfile" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.192811 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="b15e2345-6ecf-405e-98ce-ab95cf678eb6" containerName="manage-dockerfile" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 
00:32:14.192950 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="b15e2345-6ecf-405e-98ce-ab95cf678eb6" containerName="docker-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.194095 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.196571 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-ca" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.196945 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.197013 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-sys-config" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.200052 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-global-ca" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.211892 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314427 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314487 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314521 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314557 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314596 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-root\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc 
kubenswrapper[4791]: I1007 00:32:14.314624 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zwjr\" (UniqueName: \"kubernetes.io/projected/d3cd2411-acc8-495c-a947-47bb4383cfd0-kube-api-access-5zwjr\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314658 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildcachedir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314690 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314766 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314794 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314837 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.314860 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.415938 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: 
I1007 00:32:14.415997 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-root\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416017 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zwjr\" (UniqueName: \"kubernetes.io/projected/d3cd2411-acc8-495c-a947-47bb4383cfd0-kube-api-access-5zwjr\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416045 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildcachedir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416077 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416128 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416158 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416177 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416200 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416248 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416273 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416297 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416195 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildcachedir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416571 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416672 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-root\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416722 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416769 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416814 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " 
pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.416846 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.417280 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.417605 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.427033 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.427033 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.433751 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zwjr\" (UniqueName: \"kubernetes.io/projected/d3cd2411-acc8-495c-a947-47bb4383cfd0-kube-api-access-5zwjr\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.514383 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:14 crc kubenswrapper[4791]: I1007 00:32:14.752183 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Oct 07 00:32:15 crc kubenswrapper[4791]: I1007 00:32:15.140308 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"d3cd2411-acc8-495c-a947-47bb4383cfd0","Type":"ContainerStarted","Data":"ca319d35ebd593f896ea0b89e9ef5394eda04110a9f51912c5be966aaba09d8a"} Oct 07 00:32:15 crc kubenswrapper[4791]: I1007 00:32:15.140360 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"d3cd2411-acc8-495c-a947-47bb4383cfd0","Type":"ContainerStarted","Data":"1b95b96fd5ab30db8d65cc43b45ff0403e4e19c08de81a7c879c1ee5ccab6bcd"} Oct 07 00:32:16 crc kubenswrapper[4791]: I1007 00:32:16.148035 4791 generic.go:334] "Generic (PLEG): container finished" podID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerID="ca319d35ebd593f896ea0b89e9ef5394eda04110a9f51912c5be966aaba09d8a" exitCode=0 Oct 07 00:32:16 crc kubenswrapper[4791]: I1007 00:32:16.148147 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"d3cd2411-acc8-495c-a947-47bb4383cfd0","Type":"ContainerDied","Data":"ca319d35ebd593f896ea0b89e9ef5394eda04110a9f51912c5be966aaba09d8a"} Oct 07 00:32:17 crc kubenswrapper[4791]: I1007 00:32:17.158612 4791 generic.go:334] "Generic (PLEG): container finished" podID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerID="33a98f6537dd93fcabd551d1f42015286966e92095b746f3aaf58d0261a9d307" exitCode=0 Oct 07 00:32:17 crc kubenswrapper[4791]: I1007 00:32:17.158671 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"d3cd2411-acc8-495c-a947-47bb4383cfd0","Type":"ContainerDied","Data":"33a98f6537dd93fcabd551d1f42015286966e92095b746f3aaf58d0261a9d307"} Oct 07 00:32:17 crc kubenswrapper[4791]: I1007 00:32:17.193893 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-2-build_d3cd2411-acc8-495c-a947-47bb4383cfd0/manage-dockerfile/0.log" Oct 07 00:32:18 crc kubenswrapper[4791]: I1007 00:32:18.170386 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"d3cd2411-acc8-495c-a947-47bb4383cfd0","Type":"ContainerStarted","Data":"47bb11ea73bff043a008da416d25c0596088d5f79cc0b9d1862deabdb6268354"} Oct 07 00:32:20 crc kubenswrapper[4791]: I1007 00:32:20.184042 4791 generic.go:334] "Generic (PLEG): container finished" podID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerID="47bb11ea73bff043a008da416d25c0596088d5f79cc0b9d1862deabdb6268354" exitCode=0 Oct 07 00:32:20 crc kubenswrapper[4791]: I1007 00:32:20.184105 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"d3cd2411-acc8-495c-a947-47bb4383cfd0","Type":"ContainerDied","Data":"47bb11ea73bff043a008da416d25c0596088d5f79cc0b9d1862deabdb6268354"} Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.469707 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.620843 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-run\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.620894 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-proxy-ca-bundles\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.620931 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildcachedir\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.620965 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-pull\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621030 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-blob-cache\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621052 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-ca-bundles\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621086 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-push\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621068 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621107 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zwjr\" (UniqueName: \"kubernetes.io/projected/d3cd2411-acc8-495c-a947-47bb4383cfd0-kube-api-access-5zwjr\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621212 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-root\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621253 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-node-pullsecrets\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621301 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildworkdir\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621360 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-system-configs\") pod \"d3cd2411-acc8-495c-a947-47bb4383cfd0\" (UID: \"d3cd2411-acc8-495c-a947-47bb4383cfd0\") " Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621521 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621858 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621921 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d3cd2411-acc8-495c-a947-47bb4383cfd0-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621860 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.621960 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.622419 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.622753 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.623041 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.623185 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.626870 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3cd2411-acc8-495c-a947-47bb4383cfd0-kube-api-access-5zwjr" (OuterVolumeSpecName: "kube-api-access-5zwjr") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "kube-api-access-5zwjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.627115 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.627367 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.627655 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "d3cd2411-acc8-495c-a947-47bb4383cfd0" (UID: "d3cd2411-acc8-495c-a947-47bb4383cfd0"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723673 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723716 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723725 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723735 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/d3cd2411-acc8-495c-a947-47bb4383cfd0-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723743 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zwjr\" (UniqueName: \"kubernetes.io/projected/d3cd2411-acc8-495c-a947-47bb4383cfd0-kube-api-access-5zwjr\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723754 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723762 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723773 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc kubenswrapper[4791]: I1007 00:32:21.723781 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d3cd2411-acc8-495c-a947-47bb4383cfd0-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:21 crc 
kubenswrapper[4791]: I1007 00:32:21.723789 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d3cd2411-acc8-495c-a947-47bb4383cfd0-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:32:22 crc kubenswrapper[4791]: I1007 00:32:22.200311 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"d3cd2411-acc8-495c-a947-47bb4383cfd0","Type":"ContainerDied","Data":"1b95b96fd5ab30db8d65cc43b45ff0403e4e19c08de81a7c879c1ee5ccab6bcd"} Oct 07 00:32:22 crc kubenswrapper[4791]: I1007 00:32:22.200366 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b95b96fd5ab30db8d65cc43b45ff0403e4e19c08de81a7c879c1ee5ccab6bcd" Oct 07 00:32:22 crc kubenswrapper[4791]: I1007 00:32:22.200395 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.918272 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Oct 07 00:32:37 crc kubenswrapper[4791]: E1007 00:32:37.919157 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerName="git-clone" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.919172 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerName="git-clone" Oct 07 00:32:37 crc kubenswrapper[4791]: E1007 00:32:37.919186 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerName="manage-dockerfile" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.919192 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerName="manage-dockerfile" Oct 07 00:32:37 crc kubenswrapper[4791]: E1007 00:32:37.919204 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerName="docker-build" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.919210 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerName="docker-build" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.919312 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3cd2411-acc8-495c-a947-47bb4383cfd0" containerName="docker-build" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.920207 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.933642 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-framework-index-dockercfg" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.933746 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-global-ca" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.934059 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-sys-config" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.934129 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-ca" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.934091 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-bhkdz" Oct 07 00:32:37 crc kubenswrapper[4791]: I1007 00:32:37.949993 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.061537 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.061592 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.061616 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.061647 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062025 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062105 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062230 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062252 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062276 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062313 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062337 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062514 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzwbh\" (UniqueName: \"kubernetes.io/projected/9097d00f-ef32-4168-a21f-e94101099ead-kube-api-access-lzwbh\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.062563 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-buildworkdir\") pod 
\"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164171 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164241 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164267 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164287 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164313 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164342 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164375 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164440 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzwbh\" (UniqueName: 
\"kubernetes.io/projected/9097d00f-ef32-4168-a21f-e94101099ead-kube-api-access-lzwbh\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164470 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164482 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.164502 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.165527 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.165653 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.165358 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.165326 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.165611 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: 
\"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.165765 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.165541 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.165821 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.166074 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.166216 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.167168 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.170981 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.172229 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc 
kubenswrapper[4791]: I1007 00:32:38.172334 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.186163 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzwbh\" (UniqueName: \"kubernetes.io/projected/9097d00f-ef32-4168-a21f-e94101099ead-kube-api-access-lzwbh\") pod \"service-telemetry-framework-index-1-build\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.260310 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:32:38 crc kubenswrapper[4791]: I1007 00:32:38.503260 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Oct 07 00:32:39 crc kubenswrapper[4791]: I1007 00:32:39.326018 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"9097d00f-ef32-4168-a21f-e94101099ead","Type":"ContainerStarted","Data":"c05bd129a0671a60485dcc7c9939db8f316dd3517321722037b7be5e37502c8b"} Oct 07 00:32:39 crc kubenswrapper[4791]: I1007 00:32:39.326439 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"9097d00f-ef32-4168-a21f-e94101099ead","Type":"ContainerStarted","Data":"8fdd8cf7f255124806dd48648656c7af15b7a6ce4605ca0721d5edb2fb391059"} Oct 07 00:32:40 crc kubenswrapper[4791]: I1007 00:32:40.334636 4791 generic.go:334] "Generic (PLEG): container finished" podID="9097d00f-ef32-4168-a21f-e94101099ead" containerID="c05bd129a0671a60485dcc7c9939db8f316dd3517321722037b7be5e37502c8b" exitCode=0 Oct 07 00:32:40 crc kubenswrapper[4791]: I1007 00:32:40.334733 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"9097d00f-ef32-4168-a21f-e94101099ead","Type":"ContainerDied","Data":"c05bd129a0671a60485dcc7c9939db8f316dd3517321722037b7be5e37502c8b"} Oct 07 00:32:41 crc kubenswrapper[4791]: I1007 00:32:41.344381 4791 generic.go:334] "Generic (PLEG): container finished" podID="9097d00f-ef32-4168-a21f-e94101099ead" containerID="b1cefc299d12f56adcd7887978faa009e44668bcab61b3d0c91ebd29f04a3b15" exitCode=0 Oct 07 00:32:41 crc kubenswrapper[4791]: I1007 00:32:41.344454 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"9097d00f-ef32-4168-a21f-e94101099ead","Type":"ContainerDied","Data":"b1cefc299d12f56adcd7887978faa009e44668bcab61b3d0c91ebd29f04a3b15"} Oct 07 00:32:41 crc kubenswrapper[4791]: I1007 00:32:41.382989 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_9097d00f-ef32-4168-a21f-e94101099ead/manage-dockerfile/0.log" Oct 07 00:32:41 crc kubenswrapper[4791]: I1007 00:32:41.600885 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:32:41 crc kubenswrapper[4791]: I1007 00:32:41.601118 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:32:42 crc kubenswrapper[4791]: I1007 00:32:42.354939 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"9097d00f-ef32-4168-a21f-e94101099ead","Type":"ContainerStarted","Data":"d6b0f18ea2fb067ca0ba5356bf9a6655c96cb165a662a74f67c5bc7fdf8a92f7"} Oct 07 00:32:42 crc kubenswrapper[4791]: I1007 00:32:42.384331 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-index-1-build" podStartSLOduration=5.384308914 podStartE2EDuration="5.384308914s" podCreationTimestamp="2025-10-07 00:32:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:32:42.381994847 +0000 UTC m=+1288.977932498" watchObservedRunningTime="2025-10-07 00:32:42.384308914 +0000 UTC m=+1288.980246575" Oct 07 00:33:11 crc kubenswrapper[4791]: I1007 00:33:11.548509 4791 generic.go:334] "Generic (PLEG): container finished" podID="9097d00f-ef32-4168-a21f-e94101099ead" containerID="d6b0f18ea2fb067ca0ba5356bf9a6655c96cb165a662a74f67c5bc7fdf8a92f7" exitCode=0 Oct 07 00:33:11 crc kubenswrapper[4791]: I1007 00:33:11.548578 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"9097d00f-ef32-4168-a21f-e94101099ead","Type":"ContainerDied","Data":"d6b0f18ea2fb067ca0ba5356bf9a6655c96cb165a662a74f67c5bc7fdf8a92f7"} Oct 07 00:33:11 crc kubenswrapper[4791]: I1007 00:33:11.601168 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:33:11 crc kubenswrapper[4791]: I1007 00:33:11.601237 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:33:11 crc kubenswrapper[4791]: I1007 00:33:11.601289 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:33:11 crc kubenswrapper[4791]: I1007 00:33:11.601913 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"186c564d61df70f559e1048abd8501416d9bf37bf9acf5cdce844554cae2f448"} pod="openshift-machine-config-operator/machine-config-daemon-h728c" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 00:33:11 crc 
kubenswrapper[4791]: I1007 00:33:11.601965 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" containerID="cri-o://186c564d61df70f559e1048abd8501416d9bf37bf9acf5cdce844554cae2f448" gracePeriod=600 Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.558675 4791 generic.go:334] "Generic (PLEG): container finished" podID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerID="186c564d61df70f559e1048abd8501416d9bf37bf9acf5cdce844554cae2f448" exitCode=0 Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.558748 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerDied","Data":"186c564d61df70f559e1048abd8501416d9bf37bf9acf5cdce844554cae2f448"} Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.559385 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e"} Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.559428 4791 scope.go:117] "RemoveContainer" containerID="707f6b1f578a3829a964bf21bc15d9b1043ee4b1415c1be3d0c64a4ecaf4fa34" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.793320 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945596 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-ca-bundles\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945652 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945695 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-buildworkdir\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945723 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-root\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945752 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-pull\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: 
\"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945812 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-system-configs\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945833 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-push\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945893 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-run\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945920 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-buildcachedir\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945947 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-build-blob-cache\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.945964 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzwbh\" (UniqueName: \"kubernetes.io/projected/9097d00f-ef32-4168-a21f-e94101099ead-kube-api-access-lzwbh\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.946012 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-proxy-ca-bundles\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.946242 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-node-pullsecrets\") pod \"9097d00f-ef32-4168-a21f-e94101099ead\" (UID: \"9097d00f-ef32-4168-a21f-e94101099ead\") " Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.946250 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.946517 4791 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.946551 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.947273 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.947303 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.947806 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.947849 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.948775 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.953231 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-pull" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-pull") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "builder-dockercfg-bhkdz-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.953272 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-service-telemetry-framework-index-dockercfg-user-build-volume" (OuterVolumeSpecName: "service-telemetry-framework-index-dockercfg-user-build-volume") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "service-telemetry-framework-index-dockercfg-user-build-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.953493 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9097d00f-ef32-4168-a21f-e94101099ead-kube-api-access-lzwbh" (OuterVolumeSpecName: "kube-api-access-lzwbh") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "kube-api-access-lzwbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:33:12 crc kubenswrapper[4791]: I1007 00:33:12.953790 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-push" (OuterVolumeSpecName: "builder-dockercfg-bhkdz-push") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "builder-dockercfg-bhkdz-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047660 4791 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047697 4791 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9097d00f-ef32-4168-a21f-e94101099ead-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047709 4791 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047721 4791 reconciler_common.go:293] "Volume detached for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-service-telemetry-framework-index-dockercfg-user-build-volume\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047735 4791 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047748 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-pull\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-pull\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047761 4791 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: 
\"kubernetes.io/configmap/9097d00f-ef32-4168-a21f-e94101099ead-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047771 4791 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-bhkdz-push\" (UniqueName: \"kubernetes.io/secret/9097d00f-ef32-4168-a21f-e94101099ead-builder-dockercfg-bhkdz-push\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047781 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.047791 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzwbh\" (UniqueName: \"kubernetes.io/projected/9097d00f-ef32-4168-a21f-e94101099ead-kube-api-access-lzwbh\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.180984 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.251095 4791 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.566855 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"9097d00f-ef32-4168-a21f-e94101099ead","Type":"ContainerDied","Data":"8fdd8cf7f255124806dd48648656c7af15b7a6ce4605ca0721d5edb2fb391059"} Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.567639 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fdd8cf7f255124806dd48648656c7af15b7a6ce4605ca0721d5edb2fb391059" Oct 07 00:33:13 crc kubenswrapper[4791]: I1007 00:33:13.567297 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 07 00:33:14 crc kubenswrapper[4791]: I1007 00:33:14.521007 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "9097d00f-ef32-4168-a21f-e94101099ead" (UID: "9097d00f-ef32-4168-a21f-e94101099ead"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:33:14 crc kubenswrapper[4791]: I1007 00:33:14.572396 4791 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/9097d00f-ef32-4168-a21f-e94101099ead-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.270513 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-f85lr"] Oct 07 00:33:15 crc kubenswrapper[4791]: E1007 00:33:15.270795 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9097d00f-ef32-4168-a21f-e94101099ead" containerName="docker-build" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.270809 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9097d00f-ef32-4168-a21f-e94101099ead" containerName="docker-build" Oct 07 00:33:15 crc kubenswrapper[4791]: E1007 00:33:15.270822 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9097d00f-ef32-4168-a21f-e94101099ead" containerName="manage-dockerfile" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.270829 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9097d00f-ef32-4168-a21f-e94101099ead" containerName="manage-dockerfile" Oct 07 00:33:15 crc kubenswrapper[4791]: E1007 00:33:15.270842 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9097d00f-ef32-4168-a21f-e94101099ead" containerName="git-clone" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.270850 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="9097d00f-ef32-4168-a21f-e94101099ead" containerName="git-clone" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.270997 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="9097d00f-ef32-4168-a21f-e94101099ead" containerName="docker-build" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.271782 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-f85lr" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.274627 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"infrawatch-operators-dockercfg-76kqc" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.280785 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-f85lr"] Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.281946 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwh6c\" (UniqueName: \"kubernetes.io/projected/5ecefac5-93e2-4296-aed3-bcfef9830bff-kube-api-access-rwh6c\") pod \"infrawatch-operators-f85lr\" (UID: \"5ecefac5-93e2-4296-aed3-bcfef9830bff\") " pod="service-telemetry/infrawatch-operators-f85lr" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.383165 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwh6c\" (UniqueName: \"kubernetes.io/projected/5ecefac5-93e2-4296-aed3-bcfef9830bff-kube-api-access-rwh6c\") pod \"infrawatch-operators-f85lr\" (UID: \"5ecefac5-93e2-4296-aed3-bcfef9830bff\") " pod="service-telemetry/infrawatch-operators-f85lr" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.405990 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwh6c\" (UniqueName: \"kubernetes.io/projected/5ecefac5-93e2-4296-aed3-bcfef9830bff-kube-api-access-rwh6c\") pod \"infrawatch-operators-f85lr\" (UID: \"5ecefac5-93e2-4296-aed3-bcfef9830bff\") " pod="service-telemetry/infrawatch-operators-f85lr" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.588175 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-f85lr" Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.781379 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-f85lr"] Oct 07 00:33:15 crc kubenswrapper[4791]: I1007 00:33:15.789789 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 00:33:16 crc kubenswrapper[4791]: I1007 00:33:16.591686 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-f85lr" event={"ID":"5ecefac5-93e2-4296-aed3-bcfef9830bff","Type":"ContainerStarted","Data":"b67eb90182ce1df27395a0f4014b282e6569e53200028234aac6d78269cf2403"} Oct 07 00:33:20 crc kubenswrapper[4791]: I1007 00:33:20.260766 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-f85lr"] Oct 07 00:33:21 crc kubenswrapper[4791]: I1007 00:33:21.075161 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-mf4bp"] Oct 07 00:33:21 crc kubenswrapper[4791]: I1007 00:33:21.075983 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:21 crc kubenswrapper[4791]: I1007 00:33:21.080984 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-mf4bp"] Oct 07 00:33:21 crc kubenswrapper[4791]: I1007 00:33:21.195274 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8xvs\" (UniqueName: \"kubernetes.io/projected/b1eacf1c-9a88-4a13-bb7f-bd9643ae2ff7-kube-api-access-z8xvs\") pod \"infrawatch-operators-mf4bp\" (UID: \"b1eacf1c-9a88-4a13-bb7f-bd9643ae2ff7\") " pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:21 crc kubenswrapper[4791]: I1007 00:33:21.296649 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8xvs\" (UniqueName: \"kubernetes.io/projected/b1eacf1c-9a88-4a13-bb7f-bd9643ae2ff7-kube-api-access-z8xvs\") pod \"infrawatch-operators-mf4bp\" (UID: \"b1eacf1c-9a88-4a13-bb7f-bd9643ae2ff7\") " pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:21 crc kubenswrapper[4791]: I1007 00:33:21.319616 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8xvs\" (UniqueName: \"kubernetes.io/projected/b1eacf1c-9a88-4a13-bb7f-bd9643ae2ff7-kube-api-access-z8xvs\") pod \"infrawatch-operators-mf4bp\" (UID: \"b1eacf1c-9a88-4a13-bb7f-bd9643ae2ff7\") " pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:21 crc kubenswrapper[4791]: I1007 00:33:21.403761 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:25 crc kubenswrapper[4791]: I1007 00:33:25.434129 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-mf4bp"] Oct 07 00:33:25 crc kubenswrapper[4791]: I1007 00:33:25.653963 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-mf4bp" event={"ID":"b1eacf1c-9a88-4a13-bb7f-bd9643ae2ff7","Type":"ContainerStarted","Data":"b63c445fe2e1a4657ef91a9a8f9248c57623d91d348982fff05a5c8fbae43872"} Oct 07 00:33:25 crc kubenswrapper[4791]: I1007 00:33:25.655240 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-f85lr" event={"ID":"5ecefac5-93e2-4296-aed3-bcfef9830bff","Type":"ContainerStarted","Data":"e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9"} Oct 07 00:33:25 crc kubenswrapper[4791]: I1007 00:33:25.655447 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-f85lr" podUID="5ecefac5-93e2-4296-aed3-bcfef9830bff" containerName="registry-server" containerID="cri-o://e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9" gracePeriod=2 Oct 07 00:33:25 crc kubenswrapper[4791]: I1007 00:33:25.673806 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-f85lr" podStartSLOduration=1.082514345 podStartE2EDuration="10.673784774s" podCreationTimestamp="2025-10-07 00:33:15 +0000 UTC" firstStartedPulling="2025-10-07 00:33:15.789518605 +0000 UTC m=+1322.385456256" lastFinishedPulling="2025-10-07 00:33:25.380789044 +0000 UTC m=+1331.976726685" observedRunningTime="2025-10-07 00:33:25.66638048 +0000 UTC m=+1332.262318131" watchObservedRunningTime="2025-10-07 00:33:25.673784774 +0000 UTC m=+1332.269722425" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.002007 4791 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-f85lr" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.163526 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwh6c\" (UniqueName: \"kubernetes.io/projected/5ecefac5-93e2-4296-aed3-bcfef9830bff-kube-api-access-rwh6c\") pod \"5ecefac5-93e2-4296-aed3-bcfef9830bff\" (UID: \"5ecefac5-93e2-4296-aed3-bcfef9830bff\") " Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.170850 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ecefac5-93e2-4296-aed3-bcfef9830bff-kube-api-access-rwh6c" (OuterVolumeSpecName: "kube-api-access-rwh6c") pod "5ecefac5-93e2-4296-aed3-bcfef9830bff" (UID: "5ecefac5-93e2-4296-aed3-bcfef9830bff"). InnerVolumeSpecName "kube-api-access-rwh6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.265739 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwh6c\" (UniqueName: \"kubernetes.io/projected/5ecefac5-93e2-4296-aed3-bcfef9830bff-kube-api-access-rwh6c\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.669252 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-mf4bp" event={"ID":"b1eacf1c-9a88-4a13-bb7f-bd9643ae2ff7","Type":"ContainerStarted","Data":"63238874571cb3f2e4f6615a771adf34e7d7a941f125284c96ea09d12cbf0c4d"} Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.672922 4791 generic.go:334] "Generic (PLEG): container finished" podID="5ecefac5-93e2-4296-aed3-bcfef9830bff" containerID="e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9" exitCode=0 Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.672960 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-f85lr" event={"ID":"5ecefac5-93e2-4296-aed3-bcfef9830bff","Type":"ContainerDied","Data":"e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9"} Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.672979 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-f85lr" event={"ID":"5ecefac5-93e2-4296-aed3-bcfef9830bff","Type":"ContainerDied","Data":"b67eb90182ce1df27395a0f4014b282e6569e53200028234aac6d78269cf2403"} Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.672998 4791 scope.go:117] "RemoveContainer" containerID="e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.673152 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-f85lr" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.688011 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-mf4bp" podStartSLOduration=5.597023712 podStartE2EDuration="5.687983739s" podCreationTimestamp="2025-10-07 00:33:21 +0000 UTC" firstStartedPulling="2025-10-07 00:33:25.452490484 +0000 UTC m=+1332.048428135" lastFinishedPulling="2025-10-07 00:33:25.543450511 +0000 UTC m=+1332.139388162" observedRunningTime="2025-10-07 00:33:26.683474958 +0000 UTC m=+1333.279412649" watchObservedRunningTime="2025-10-07 00:33:26.687983739 +0000 UTC m=+1333.283921390" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.701074 4791 scope.go:117] "RemoveContainer" containerID="e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9" Oct 07 00:33:26 crc kubenswrapper[4791]: E1007 00:33:26.701824 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9\": container with ID starting with e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9 not found: ID does not exist" containerID="e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.701880 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9"} err="failed to get container status \"e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9\": rpc error: code = NotFound desc = could not find container \"e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9\": container with ID starting with e871e7a0e6118b93ff4f2fc33f0fb09f35b452ae7ab3747f80db59fd3370e8e9 not found: ID does not exist" Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.709073 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-f85lr"] Oct 07 00:33:26 crc kubenswrapper[4791]: I1007 00:33:26.717618 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-f85lr"] Oct 07 00:33:28 crc kubenswrapper[4791]: I1007 00:33:28.076339 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ecefac5-93e2-4296-aed3-bcfef9830bff" path="/var/lib/kubelet/pods/5ecefac5-93e2-4296-aed3-bcfef9830bff/volumes" Oct 07 00:33:31 crc kubenswrapper[4791]: I1007 00:33:31.404548 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:31 crc kubenswrapper[4791]: I1007 00:33:31.405291 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:31 crc kubenswrapper[4791]: I1007 00:33:31.431155 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:31 crc kubenswrapper[4791]: I1007 00:33:31.731077 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-mf4bp" Oct 07 00:33:35 crc kubenswrapper[4791]: I1007 00:33:35.904901 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp"] Oct 07 00:33:35 crc kubenswrapper[4791]: E1007 00:33:35.905458 
4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ecefac5-93e2-4296-aed3-bcfef9830bff" containerName="registry-server" Oct 07 00:33:35 crc kubenswrapper[4791]: I1007 00:33:35.905485 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ecefac5-93e2-4296-aed3-bcfef9830bff" containerName="registry-server" Oct 07 00:33:35 crc kubenswrapper[4791]: I1007 00:33:35.905634 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ecefac5-93e2-4296-aed3-bcfef9830bff" containerName="registry-server" Oct 07 00:33:35 crc kubenswrapper[4791]: I1007 00:33:35.906806 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:35 crc kubenswrapper[4791]: I1007 00:33:35.930034 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp"] Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.009880 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb9xq\" (UniqueName: \"kubernetes.io/projected/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-kube-api-access-mb9xq\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.010045 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.010117 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.111519 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb9xq\" (UniqueName: \"kubernetes.io/projected/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-kube-api-access-mb9xq\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.111573 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.111597 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.112050 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.112071 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.131129 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb9xq\" (UniqueName: \"kubernetes.io/projected/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-kube-api-access-mb9xq\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.236728 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.439346 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp"] Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.702371 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq"] Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.704080 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.717686 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq"] Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.736172 4791 generic.go:334] "Generic (PLEG): container finished" podID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerID="8e3f55f4a13a5dcb68ff61568a3b3cfd0d78d9b0b27213f5b1f35a1e42db4e37" exitCode=0 Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.736221 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" event={"ID":"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21","Type":"ContainerDied","Data":"8e3f55f4a13a5dcb68ff61568a3b3cfd0d78d9b0b27213f5b1f35a1e42db4e37"} Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.736251 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" event={"ID":"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21","Type":"ContainerStarted","Data":"467590a0e43e2ee7325bac13031648b8dfa3fadaa9a6b2a6ebad8f881946dda1"} Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.826400 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zv6l\" (UniqueName: \"kubernetes.io/projected/219f101b-a6db-40c3-92ac-36820e477a7c-kube-api-access-2zv6l\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.826871 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.826891 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.927887 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zv6l\" (UniqueName: \"kubernetes.io/projected/219f101b-a6db-40c3-92ac-36820e477a7c-kube-api-access-2zv6l\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.927936 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: 
\"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.927962 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.928645 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.928814 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:36 crc kubenswrapper[4791]: I1007 00:33:36.946516 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zv6l\" (UniqueName: \"kubernetes.io/projected/219f101b-a6db-40c3-92ac-36820e477a7c-kube-api-access-2zv6l\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:37 crc kubenswrapper[4791]: I1007 00:33:37.072684 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:37 crc kubenswrapper[4791]: I1007 00:33:37.319177 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq"] Oct 07 00:33:37 crc kubenswrapper[4791]: I1007 00:33:37.743101 4791 generic.go:334] "Generic (PLEG): container finished" podID="219f101b-a6db-40c3-92ac-36820e477a7c" containerID="c67b8053da70483b91152a1c17fb9e8f29372157116d063e5df43e888ea2eb2b" exitCode=0 Oct 07 00:33:37 crc kubenswrapper[4791]: I1007 00:33:37.743212 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" event={"ID":"219f101b-a6db-40c3-92ac-36820e477a7c","Type":"ContainerDied","Data":"c67b8053da70483b91152a1c17fb9e8f29372157116d063e5df43e888ea2eb2b"} Oct 07 00:33:37 crc kubenswrapper[4791]: I1007 00:33:37.743890 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" event={"ID":"219f101b-a6db-40c3-92ac-36820e477a7c","Type":"ContainerStarted","Data":"55ea37a1b403fe95484fe7ebe51d432b862aac374f59b5396bc849b52d803ffa"} Oct 07 00:33:37 crc kubenswrapper[4791]: I1007 00:33:37.745722 4791 generic.go:334] "Generic (PLEG): container finished" podID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerID="8a3092235c864a975646b39ac40999a656ccac187cf4601349473201de886a0d" exitCode=0 Oct 07 00:33:37 crc kubenswrapper[4791]: I1007 00:33:37.745774 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" event={"ID":"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21","Type":"ContainerDied","Data":"8a3092235c864a975646b39ac40999a656ccac187cf4601349473201de886a0d"} Oct 07 00:33:38 crc kubenswrapper[4791]: I1007 00:33:38.755391 4791 generic.go:334] "Generic (PLEG): container finished" podID="219f101b-a6db-40c3-92ac-36820e477a7c" containerID="a5a8fca9d9997231ca1b8519dca8de2c5cdbbec4189950cc5db6e0e671f02a35" exitCode=0 Oct 07 00:33:38 crc kubenswrapper[4791]: I1007 00:33:38.755434 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" event={"ID":"219f101b-a6db-40c3-92ac-36820e477a7c","Type":"ContainerDied","Data":"a5a8fca9d9997231ca1b8519dca8de2c5cdbbec4189950cc5db6e0e671f02a35"} Oct 07 00:33:38 crc kubenswrapper[4791]: I1007 00:33:38.766198 4791 generic.go:334] "Generic (PLEG): container finished" podID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerID="7ab27437cc4c3fee28e83c4dd28767e841e3ced16a6584f4555e383f1657bece" exitCode=0 Oct 07 00:33:38 crc kubenswrapper[4791]: I1007 00:33:38.766250 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" event={"ID":"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21","Type":"ContainerDied","Data":"7ab27437cc4c3fee28e83c4dd28767e841e3ced16a6584f4555e383f1657bece"} Oct 07 00:33:39 crc kubenswrapper[4791]: I1007 00:33:39.774497 4791 generic.go:334] "Generic (PLEG): container finished" podID="219f101b-a6db-40c3-92ac-36820e477a7c" containerID="487f2f08af33aca387afb6154a73ab8c9cafd0df039e27e8687172742c5f031f" exitCode=0 Oct 07 00:33:39 crc kubenswrapper[4791]: I1007 00:33:39.774635 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" event={"ID":"219f101b-a6db-40c3-92ac-36820e477a7c","Type":"ContainerDied","Data":"487f2f08af33aca387afb6154a73ab8c9cafd0df039e27e8687172742c5f031f"} Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.080186 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.170723 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-bundle\") pod \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.170803 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-util\") pod \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.170856 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb9xq\" (UniqueName: \"kubernetes.io/projected/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-kube-api-access-mb9xq\") pod \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\" (UID: \"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21\") " Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.171481 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-bundle" (OuterVolumeSpecName: "bundle") pod "a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" (UID: "a100f8bc-8ccb-4c2d-a6cd-1418073dbf21"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.177914 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-kube-api-access-mb9xq" (OuterVolumeSpecName: "kube-api-access-mb9xq") pod "a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" (UID: "a100f8bc-8ccb-4c2d-a6cd-1418073dbf21"). InnerVolumeSpecName "kube-api-access-mb9xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.212649 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-util" (OuterVolumeSpecName: "util") pod "a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" (UID: "a100f8bc-8ccb-4c2d-a6cd-1418073dbf21"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.272428 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb9xq\" (UniqueName: \"kubernetes.io/projected/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-kube-api-access-mb9xq\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.272466 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.272475 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a100f8bc-8ccb-4c2d-a6cd-1418073dbf21-util\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.786096 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" event={"ID":"a100f8bc-8ccb-4c2d-a6cd-1418073dbf21","Type":"ContainerDied","Data":"467590a0e43e2ee7325bac13031648b8dfa3fadaa9a6b2a6ebad8f881946dda1"} Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.786143 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c096nndp" Oct 07 00:33:40 crc kubenswrapper[4791]: I1007 00:33:40.786175 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="467590a0e43e2ee7325bac13031648b8dfa3fadaa9a6b2a6ebad8f881946dda1" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.014378 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.083420 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-util\") pod \"219f101b-a6db-40c3-92ac-36820e477a7c\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.083497 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-bundle\") pod \"219f101b-a6db-40c3-92ac-36820e477a7c\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.083566 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zv6l\" (UniqueName: \"kubernetes.io/projected/219f101b-a6db-40c3-92ac-36820e477a7c-kube-api-access-2zv6l\") pod \"219f101b-a6db-40c3-92ac-36820e477a7c\" (UID: \"219f101b-a6db-40c3-92ac-36820e477a7c\") " Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.084802 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-bundle" (OuterVolumeSpecName: "bundle") pod "219f101b-a6db-40c3-92ac-36820e477a7c" (UID: "219f101b-a6db-40c3-92ac-36820e477a7c"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.094657 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/219f101b-a6db-40c3-92ac-36820e477a7c-kube-api-access-2zv6l" (OuterVolumeSpecName: "kube-api-access-2zv6l") pod "219f101b-a6db-40c3-92ac-36820e477a7c" (UID: "219f101b-a6db-40c3-92ac-36820e477a7c"). InnerVolumeSpecName "kube-api-access-2zv6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.104515 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-util" (OuterVolumeSpecName: "util") pod "219f101b-a6db-40c3-92ac-36820e477a7c" (UID: "219f101b-a6db-40c3-92ac-36820e477a7c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.184862 4791 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-util\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.184917 4791 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/219f101b-a6db-40c3-92ac-36820e477a7c-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.184936 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zv6l\" (UniqueName: \"kubernetes.io/projected/219f101b-a6db-40c3-92ac-36820e477a7c-kube-api-access-2zv6l\") on node \"crc\" DevicePath \"\"" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.799830 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" event={"ID":"219f101b-a6db-40c3-92ac-36820e477a7c","Type":"ContainerDied","Data":"55ea37a1b403fe95484fe7ebe51d432b862aac374f59b5396bc849b52d803ffa"} Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.799907 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65afvwtq" Oct 07 00:33:41 crc kubenswrapper[4791]: I1007 00:33:41.799929 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55ea37a1b403fe95484fe7ebe51d432b862aac374f59b5396bc849b52d803ffa" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.083354 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ttt79"] Oct 07 00:33:42 crc kubenswrapper[4791]: E1007 00:33:42.083738 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="219f101b-a6db-40c3-92ac-36820e477a7c" containerName="extract" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.083757 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="219f101b-a6db-40c3-92ac-36820e477a7c" containerName="extract" Oct 07 00:33:42 crc kubenswrapper[4791]: E1007 00:33:42.083769 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="219f101b-a6db-40c3-92ac-36820e477a7c" containerName="pull" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.083779 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="219f101b-a6db-40c3-92ac-36820e477a7c" containerName="pull" Oct 07 00:33:42 crc kubenswrapper[4791]: E1007 00:33:42.083787 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerName="extract" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.083793 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerName="extract" Oct 07 00:33:42 crc kubenswrapper[4791]: E1007 00:33:42.083805 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="219f101b-a6db-40c3-92ac-36820e477a7c" containerName="util" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.083813 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="219f101b-a6db-40c3-92ac-36820e477a7c" containerName="util" Oct 07 00:33:42 crc kubenswrapper[4791]: E1007 00:33:42.083830 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerName="pull" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.083836 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerName="pull" Oct 07 00:33:42 crc kubenswrapper[4791]: E1007 00:33:42.083848 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerName="util" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.083855 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerName="util" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.083995 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="219f101b-a6db-40c3-92ac-36820e477a7c" containerName="extract" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.084019 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="a100f8bc-8ccb-4c2d-a6cd-1418073dbf21" containerName="extract" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.085257 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.094009 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ttt79"] Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.201666 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpgfx\" (UniqueName: \"kubernetes.io/projected/65a89651-a561-40b8-900b-a1d76119c439-kube-api-access-dpgfx\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.201740 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-utilities\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.202088 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-catalog-content\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.303350 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-catalog-content\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.303440 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpgfx\" (UniqueName: \"kubernetes.io/projected/65a89651-a561-40b8-900b-a1d76119c439-kube-api-access-dpgfx\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.303474 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-utilities\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.303867 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-catalog-content\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.303944 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-utilities\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.321024 4791 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dpgfx\" (UniqueName: \"kubernetes.io/projected/65a89651-a561-40b8-900b-a1d76119c439-kube-api-access-dpgfx\") pod \"community-operators-ttt79\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.401807 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.693009 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ttt79"] Oct 07 00:33:42 crc kubenswrapper[4791]: W1007 00:33:42.699444 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65a89651_a561_40b8_900b_a1d76119c439.slice/crio-53847cd42249aa2076bed09d7a1744902130a90702cafc80af378b8406ca52f7 WatchSource:0}: Error finding container 53847cd42249aa2076bed09d7a1744902130a90702cafc80af378b8406ca52f7: Status 404 returned error can't find the container with id 53847cd42249aa2076bed09d7a1744902130a90702cafc80af378b8406ca52f7 Oct 07 00:33:42 crc kubenswrapper[4791]: I1007 00:33:42.808180 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttt79" event={"ID":"65a89651-a561-40b8-900b-a1d76119c439","Type":"ContainerStarted","Data":"53847cd42249aa2076bed09d7a1744902130a90702cafc80af378b8406ca52f7"} Oct 07 00:33:43 crc kubenswrapper[4791]: I1007 00:33:43.817863 4791 generic.go:334] "Generic (PLEG): container finished" podID="65a89651-a561-40b8-900b-a1d76119c439" containerID="3047685a7ea1df8591158b2b9a105d1c061a6ccbfc2b1c986880b5a530239d77" exitCode=0 Oct 07 00:33:43 crc kubenswrapper[4791]: I1007 00:33:43.818503 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttt79" event={"ID":"65a89651-a561-40b8-900b-a1d76119c439","Type":"ContainerDied","Data":"3047685a7ea1df8591158b2b9a105d1c061a6ccbfc2b1c986880b5a530239d77"} Oct 07 00:33:44 crc kubenswrapper[4791]: I1007 00:33:44.826967 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttt79" event={"ID":"65a89651-a561-40b8-900b-a1d76119c439","Type":"ContainerStarted","Data":"0a392fe422b39c959e6719aacc87967cb1b9e03d054a4b7b47a315a25d0a9398"} Oct 07 00:33:45 crc kubenswrapper[4791]: I1007 00:33:45.851194 4791 generic.go:334] "Generic (PLEG): container finished" podID="65a89651-a561-40b8-900b-a1d76119c439" containerID="0a392fe422b39c959e6719aacc87967cb1b9e03d054a4b7b47a315a25d0a9398" exitCode=0 Oct 07 00:33:45 crc kubenswrapper[4791]: I1007 00:33:45.851317 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttt79" event={"ID":"65a89651-a561-40b8-900b-a1d76119c439","Type":"ContainerDied","Data":"0a392fe422b39c959e6719aacc87967cb1b9e03d054a4b7b47a315a25d0a9398"} Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.079440 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-68b9c75957-c2wzb"] Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.080509 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.089947 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-68b9c75957-c2wzb"] Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.094605 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-h4ztm" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.157276 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj8z7\" (UniqueName: \"kubernetes.io/projected/4ce6e294-f1e5-446b-a55f-fdee99b8b961-kube-api-access-wj8z7\") pod \"service-telemetry-operator-68b9c75957-c2wzb\" (UID: \"4ce6e294-f1e5-446b-a55f-fdee99b8b961\") " pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.157344 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/4ce6e294-f1e5-446b-a55f-fdee99b8b961-runner\") pod \"service-telemetry-operator-68b9c75957-c2wzb\" (UID: \"4ce6e294-f1e5-446b-a55f-fdee99b8b961\") " pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.259387 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj8z7\" (UniqueName: \"kubernetes.io/projected/4ce6e294-f1e5-446b-a55f-fdee99b8b961-kube-api-access-wj8z7\") pod \"service-telemetry-operator-68b9c75957-c2wzb\" (UID: \"4ce6e294-f1e5-446b-a55f-fdee99b8b961\") " pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.259494 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/4ce6e294-f1e5-446b-a55f-fdee99b8b961-runner\") pod \"service-telemetry-operator-68b9c75957-c2wzb\" (UID: \"4ce6e294-f1e5-446b-a55f-fdee99b8b961\") " pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.260111 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/4ce6e294-f1e5-446b-a55f-fdee99b8b961-runner\") pod \"service-telemetry-operator-68b9c75957-c2wzb\" (UID: \"4ce6e294-f1e5-446b-a55f-fdee99b8b961\") " pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.280720 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj8z7\" (UniqueName: \"kubernetes.io/projected/4ce6e294-f1e5-446b-a55f-fdee99b8b961-kube-api-access-wj8z7\") pod \"service-telemetry-operator-68b9c75957-c2wzb\" (UID: \"4ce6e294-f1e5-446b-a55f-fdee99b8b961\") " pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.397775 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.667681 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-68b9c75957-c2wzb"] Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.866112 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttt79" event={"ID":"65a89651-a561-40b8-900b-a1d76119c439","Type":"ContainerStarted","Data":"3b0925a74c11102048f9c59e3a68104cdaf21226b68c9c58ac807c12f2b3c1cd"} Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.870164 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" event={"ID":"4ce6e294-f1e5-446b-a55f-fdee99b8b961","Type":"ContainerStarted","Data":"6a4ba6d7a4903acf49a1020569590ec264fca7b48974fdf3058d3e71d71be97a"} Oct 07 00:33:46 crc kubenswrapper[4791]: I1007 00:33:46.894774 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ttt79" podStartSLOduration=2.282502436 podStartE2EDuration="4.894749895s" podCreationTimestamp="2025-10-07 00:33:42 +0000 UTC" firstStartedPulling="2025-10-07 00:33:43.819366803 +0000 UTC m=+1350.415304454" lastFinishedPulling="2025-10-07 00:33:46.431614262 +0000 UTC m=+1353.027551913" observedRunningTime="2025-10-07 00:33:46.892765927 +0000 UTC m=+1353.488703578" watchObservedRunningTime="2025-10-07 00:33:46.894749895 +0000 UTC m=+1353.490687546" Oct 07 00:33:48 crc kubenswrapper[4791]: I1007 00:33:48.829380 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-5d7865cd-khh29"] Oct 07 00:33:48 crc kubenswrapper[4791]: I1007 00:33:48.830583 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" Oct 07 00:33:48 crc kubenswrapper[4791]: I1007 00:33:48.832310 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-5nxnf" Oct 07 00:33:48 crc kubenswrapper[4791]: I1007 00:33:48.839502 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-5d7865cd-khh29"] Oct 07 00:33:48 crc kubenswrapper[4791]: I1007 00:33:48.907315 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4htcp\" (UniqueName: \"kubernetes.io/projected/b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e-kube-api-access-4htcp\") pod \"smart-gateway-operator-5d7865cd-khh29\" (UID: \"b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e\") " pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" Oct 07 00:33:48 crc kubenswrapper[4791]: I1007 00:33:48.907379 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e-runner\") pod \"smart-gateway-operator-5d7865cd-khh29\" (UID: \"b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e\") " pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" Oct 07 00:33:49 crc kubenswrapper[4791]: I1007 00:33:49.008618 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4htcp\" (UniqueName: \"kubernetes.io/projected/b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e-kube-api-access-4htcp\") pod \"smart-gateway-operator-5d7865cd-khh29\" (UID: \"b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e\") " pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" Oct 07 00:33:49 crc kubenswrapper[4791]: I1007 00:33:49.008708 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e-runner\") pod \"smart-gateway-operator-5d7865cd-khh29\" (UID: \"b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e\") " pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" Oct 07 00:33:49 crc kubenswrapper[4791]: I1007 00:33:49.009437 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e-runner\") pod \"smart-gateway-operator-5d7865cd-khh29\" (UID: \"b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e\") " pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" Oct 07 00:33:49 crc kubenswrapper[4791]: I1007 00:33:49.036128 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4htcp\" (UniqueName: \"kubernetes.io/projected/b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e-kube-api-access-4htcp\") pod \"smart-gateway-operator-5d7865cd-khh29\" (UID: \"b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e\") " pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" Oct 07 00:33:49 crc kubenswrapper[4791]: I1007 00:33:49.152392 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" Oct 07 00:33:49 crc kubenswrapper[4791]: I1007 00:33:49.404048 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-5d7865cd-khh29"] Oct 07 00:33:49 crc kubenswrapper[4791]: I1007 00:33:49.901716 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" event={"ID":"b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e","Type":"ContainerStarted","Data":"4b364440d17d6b6fdb4041a817bb625ed53281e972c7739c61b040f775097d93"} Oct 07 00:33:52 crc kubenswrapper[4791]: I1007 00:33:52.403737 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:52 crc kubenswrapper[4791]: I1007 00:33:52.404156 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:52 crc kubenswrapper[4791]: I1007 00:33:52.460974 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:52 crc kubenswrapper[4791]: I1007 00:33:52.997796 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:33:55 crc kubenswrapper[4791]: I1007 00:33:55.668148 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ttt79"] Oct 07 00:33:55 crc kubenswrapper[4791]: I1007 00:33:55.668381 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ttt79" podUID="65a89651-a561-40b8-900b-a1d76119c439" containerName="registry-server" containerID="cri-o://3b0925a74c11102048f9c59e3a68104cdaf21226b68c9c58ac807c12f2b3c1cd" gracePeriod=2 Oct 07 00:33:56 crc kubenswrapper[4791]: I1007 00:33:56.993910 4791 generic.go:334] "Generic (PLEG): container finished" podID="65a89651-a561-40b8-900b-a1d76119c439" containerID="3b0925a74c11102048f9c59e3a68104cdaf21226b68c9c58ac807c12f2b3c1cd" exitCode=0 Oct 07 00:33:56 crc kubenswrapper[4791]: I1007 00:33:56.993959 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttt79" event={"ID":"65a89651-a561-40b8-900b-a1d76119c439","Type":"ContainerDied","Data":"3b0925a74c11102048f9c59e3a68104cdaf21226b68c9c58ac807c12f2b3c1cd"} Oct 07 00:33:59 crc kubenswrapper[4791]: I1007 00:33:59.967262 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.024618 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttt79" event={"ID":"65a89651-a561-40b8-900b-a1d76119c439","Type":"ContainerDied","Data":"53847cd42249aa2076bed09d7a1744902130a90702cafc80af378b8406ca52f7"} Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.024667 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ttt79" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.024678 4791 scope.go:117] "RemoveContainer" containerID="3b0925a74c11102048f9c59e3a68104cdaf21226b68c9c58ac807c12f2b3c1cd" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.114074 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-catalog-content\") pod \"65a89651-a561-40b8-900b-a1d76119c439\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.114113 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpgfx\" (UniqueName: \"kubernetes.io/projected/65a89651-a561-40b8-900b-a1d76119c439-kube-api-access-dpgfx\") pod \"65a89651-a561-40b8-900b-a1d76119c439\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.114179 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-utilities\") pod \"65a89651-a561-40b8-900b-a1d76119c439\" (UID: \"65a89651-a561-40b8-900b-a1d76119c439\") " Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.115145 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-utilities" (OuterVolumeSpecName: "utilities") pod "65a89651-a561-40b8-900b-a1d76119c439" (UID: "65a89651-a561-40b8-900b-a1d76119c439"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.121779 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65a89651-a561-40b8-900b-a1d76119c439-kube-api-access-dpgfx" (OuterVolumeSpecName: "kube-api-access-dpgfx") pod "65a89651-a561-40b8-900b-a1d76119c439" (UID: "65a89651-a561-40b8-900b-a1d76119c439"). InnerVolumeSpecName "kube-api-access-dpgfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.126718 4791 scope.go:117] "RemoveContainer" containerID="0a392fe422b39c959e6719aacc87967cb1b9e03d054a4b7b47a315a25d0a9398" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.189838 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "65a89651-a561-40b8-900b-a1d76119c439" (UID: "65a89651-a561-40b8-900b-a1d76119c439"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.215682 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.215876 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpgfx\" (UniqueName: \"kubernetes.io/projected/65a89651-a561-40b8-900b-a1d76119c439-kube-api-access-dpgfx\") on node \"crc\" DevicePath \"\"" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.215902 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a89651-a561-40b8-900b-a1d76119c439-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.359362 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ttt79"] Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.362647 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ttt79"] Oct 07 00:34:00 crc kubenswrapper[4791]: I1007 00:34:00.697225 4791 scope.go:117] "RemoveContainer" containerID="3047685a7ea1df8591158b2b9a105d1c061a6ccbfc2b1c986880b5a530239d77" Oct 07 00:34:02 crc kubenswrapper[4791]: I1007 00:34:02.078551 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65a89651-a561-40b8-900b-a1d76119c439" path="/var/lib/kubelet/pods/65a89651-a561-40b8-900b-a1d76119c439/volumes" Oct 07 00:34:04 crc kubenswrapper[4791]: I1007 00:34:04.058825 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" event={"ID":"4ce6e294-f1e5-446b-a55f-fdee99b8b961","Type":"ContainerStarted","Data":"4a04d0d32d429346e4103033c5cc8988733c4d91a5481a4cc05142e9872aeef2"} Oct 07 00:34:04 crc kubenswrapper[4791]: I1007 00:34:04.077844 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-68b9c75957-c2wzb" podStartSLOduration=1.401536835 podStartE2EDuration="18.077828546s" podCreationTimestamp="2025-10-07 00:33:46 +0000 UTC" firstStartedPulling="2025-10-07 00:33:46.678348446 +0000 UTC m=+1353.274286097" lastFinishedPulling="2025-10-07 00:34:03.354640157 +0000 UTC m=+1369.950577808" observedRunningTime="2025-10-07 00:34:04.076498558 +0000 UTC m=+1370.672436209" watchObservedRunningTime="2025-10-07 00:34:04.077828546 +0000 UTC m=+1370.673766197" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.082548 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" event={"ID":"b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e","Type":"ContainerStarted","Data":"9fdd25c26d988fd99478b4727c53cc429a16ccc6420eb49e7113e0dc06ad36e3"} Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.102163 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-5d7865cd-khh29" podStartSLOduration=2.187245253 podStartE2EDuration="19.102142543s" podCreationTimestamp="2025-10-07 00:33:48 +0000 UTC" firstStartedPulling="2025-10-07 00:33:49.440120532 +0000 UTC m=+1356.036058183" lastFinishedPulling="2025-10-07 00:34:06.355017822 +0000 UTC m=+1372.950955473" observedRunningTime="2025-10-07 00:34:07.099869097 +0000 UTC m=+1373.695806758" 
watchObservedRunningTime="2025-10-07 00:34:07.102142543 +0000 UTC m=+1373.698080194" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.272865 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qz46v"] Oct 07 00:34:07 crc kubenswrapper[4791]: E1007 00:34:07.273161 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a89651-a561-40b8-900b-a1d76119c439" containerName="extract-utilities" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.273177 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a89651-a561-40b8-900b-a1d76119c439" containerName="extract-utilities" Oct 07 00:34:07 crc kubenswrapper[4791]: E1007 00:34:07.273195 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a89651-a561-40b8-900b-a1d76119c439" containerName="registry-server" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.273203 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a89651-a561-40b8-900b-a1d76119c439" containerName="registry-server" Oct 07 00:34:07 crc kubenswrapper[4791]: E1007 00:34:07.273218 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a89651-a561-40b8-900b-a1d76119c439" containerName="extract-content" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.273225 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a89651-a561-40b8-900b-a1d76119c439" containerName="extract-content" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.273368 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="65a89651-a561-40b8-900b-a1d76119c439" containerName="registry-server" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.274272 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.282394 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qz46v"] Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.425857 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-utilities\") pod \"certified-operators-qz46v\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.426239 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-catalog-content\") pod \"certified-operators-qz46v\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.426301 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5d4q\" (UniqueName: \"kubernetes.io/projected/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-kube-api-access-p5d4q\") pod \"certified-operators-qz46v\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.527076 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-utilities\") pod \"certified-operators-qz46v\" (UID: 
\"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.527130 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-catalog-content\") pod \"certified-operators-qz46v\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.527196 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5d4q\" (UniqueName: \"kubernetes.io/projected/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-kube-api-access-p5d4q\") pod \"certified-operators-qz46v\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.527657 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-utilities\") pod \"certified-operators-qz46v\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.527692 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-catalog-content\") pod \"certified-operators-qz46v\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.555514 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5d4q\" (UniqueName: \"kubernetes.io/projected/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-kube-api-access-p5d4q\") pod \"certified-operators-qz46v\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.593967 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:07 crc kubenswrapper[4791]: I1007 00:34:07.865020 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qz46v"] Oct 07 00:34:07 crc kubenswrapper[4791]: W1007 00:34:07.879329 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6be0d1f9_c53e_4fc7_9bd9_817fb0f29eb6.slice/crio-15f1e19e0fb48a75f86fe785f0c9c65e8974a4a4ce62580de328dd3b937eaad3 WatchSource:0}: Error finding container 15f1e19e0fb48a75f86fe785f0c9c65e8974a4a4ce62580de328dd3b937eaad3: Status 404 returned error can't find the container with id 15f1e19e0fb48a75f86fe785f0c9c65e8974a4a4ce62580de328dd3b937eaad3 Oct 07 00:34:08 crc kubenswrapper[4791]: I1007 00:34:08.089915 4791 generic.go:334] "Generic (PLEG): container finished" podID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerID="1d1fb16483c7879e9c7fdd5996e2f2f16ae9f7addbb9dfa4b92c9c8ae7287fcf" exitCode=0 Oct 07 00:34:08 crc kubenswrapper[4791]: I1007 00:34:08.089985 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qz46v" event={"ID":"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6","Type":"ContainerDied","Data":"1d1fb16483c7879e9c7fdd5996e2f2f16ae9f7addbb9dfa4b92c9c8ae7287fcf"} Oct 07 00:34:08 crc kubenswrapper[4791]: I1007 00:34:08.090074 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qz46v" event={"ID":"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6","Type":"ContainerStarted","Data":"15f1e19e0fb48a75f86fe785f0c9c65e8974a4a4ce62580de328dd3b937eaad3"} Oct 07 00:34:09 crc kubenswrapper[4791]: I1007 00:34:09.099995 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qz46v" event={"ID":"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6","Type":"ContainerStarted","Data":"895643830e1e1065b6c799ffa455d3298bd9c16bcc37e0ab1502d118cd456b5a"} Oct 07 00:34:10 crc kubenswrapper[4791]: I1007 00:34:10.117812 4791 generic.go:334] "Generic (PLEG): container finished" podID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerID="895643830e1e1065b6c799ffa455d3298bd9c16bcc37e0ab1502d118cd456b5a" exitCode=0 Oct 07 00:34:10 crc kubenswrapper[4791]: I1007 00:34:10.117862 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qz46v" event={"ID":"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6","Type":"ContainerDied","Data":"895643830e1e1065b6c799ffa455d3298bd9c16bcc37e0ab1502d118cd456b5a"} Oct 07 00:34:10 crc kubenswrapper[4791]: I1007 00:34:10.117891 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qz46v" event={"ID":"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6","Type":"ContainerStarted","Data":"45dd81dd1394dd00e1022a73846a65195e1a07478f45e8afb9c8cc5e619ce0b9"} Oct 07 00:34:10 crc kubenswrapper[4791]: I1007 00:34:10.140995 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qz46v" podStartSLOduration=1.4296358 podStartE2EDuration="3.140976959s" podCreationTimestamp="2025-10-07 00:34:07 +0000 UTC" firstStartedPulling="2025-10-07 00:34:08.091106866 +0000 UTC m=+1374.687044507" lastFinishedPulling="2025-10-07 00:34:09.802448015 +0000 UTC m=+1376.398385666" observedRunningTime="2025-10-07 00:34:10.13441789 +0000 UTC m=+1376.730355551" watchObservedRunningTime="2025-10-07 00:34:10.140976959 +0000 UTC m=+1376.736914610" Oct 
07 00:34:17 crc kubenswrapper[4791]: I1007 00:34:17.594384 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:17 crc kubenswrapper[4791]: I1007 00:34:17.596050 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:17 crc kubenswrapper[4791]: I1007 00:34:17.636786 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:18 crc kubenswrapper[4791]: I1007 00:34:18.217253 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:20 crc kubenswrapper[4791]: I1007 00:34:20.858599 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qz46v"] Oct 07 00:34:20 crc kubenswrapper[4791]: I1007 00:34:20.859670 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qz46v" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerName="registry-server" containerID="cri-o://45dd81dd1394dd00e1022a73846a65195e1a07478f45e8afb9c8cc5e619ce0b9" gracePeriod=2 Oct 07 00:34:21 crc kubenswrapper[4791]: I1007 00:34:21.204177 4791 generic.go:334] "Generic (PLEG): container finished" podID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerID="45dd81dd1394dd00e1022a73846a65195e1a07478f45e8afb9c8cc5e619ce0b9" exitCode=0 Oct 07 00:34:21 crc kubenswrapper[4791]: I1007 00:34:21.204295 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qz46v" event={"ID":"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6","Type":"ContainerDied","Data":"45dd81dd1394dd00e1022a73846a65195e1a07478f45e8afb9c8cc5e619ce0b9"} Oct 07 00:34:22 crc kubenswrapper[4791]: I1007 00:34:22.804856 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:22 crc kubenswrapper[4791]: I1007 00:34:22.922324 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-utilities\") pod \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " Oct 07 00:34:22 crc kubenswrapper[4791]: I1007 00:34:22.922588 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5d4q\" (UniqueName: \"kubernetes.io/projected/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-kube-api-access-p5d4q\") pod \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " Oct 07 00:34:22 crc kubenswrapper[4791]: I1007 00:34:22.922761 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-catalog-content\") pod \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\" (UID: \"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6\") " Oct 07 00:34:22 crc kubenswrapper[4791]: I1007 00:34:22.923591 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-utilities" (OuterVolumeSpecName: "utilities") pod "6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" (UID: "6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:34:22 crc kubenswrapper[4791]: I1007 00:34:22.941912 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-kube-api-access-p5d4q" (OuterVolumeSpecName: "kube-api-access-p5d4q") pod "6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" (UID: "6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6"). InnerVolumeSpecName "kube-api-access-p5d4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:34:22 crc kubenswrapper[4791]: I1007 00:34:22.966027 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" (UID: "6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.024523 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.024568 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5d4q\" (UniqueName: \"kubernetes.io/projected/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-kube-api-access-p5d4q\") on node \"crc\" DevicePath \"\"" Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.024582 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.221440 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qz46v" event={"ID":"6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6","Type":"ContainerDied","Data":"15f1e19e0fb48a75f86fe785f0c9c65e8974a4a4ce62580de328dd3b937eaad3"} Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.221495 4791 scope.go:117] "RemoveContainer" containerID="45dd81dd1394dd00e1022a73846a65195e1a07478f45e8afb9c8cc5e619ce0b9" Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.221554 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qz46v" Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.249189 4791 scope.go:117] "RemoveContainer" containerID="895643830e1e1065b6c799ffa455d3298bd9c16bcc37e0ab1502d118cd456b5a" Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.265799 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qz46v"] Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.270922 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qz46v"] Oct 07 00:34:23 crc kubenswrapper[4791]: I1007 00:34:23.290771 4791 scope.go:117] "RemoveContainer" containerID="1d1fb16483c7879e9c7fdd5996e2f2f16ae9f7addbb9dfa4b92c9c8ae7287fcf" Oct 07 00:34:24 crc kubenswrapper[4791]: I1007 00:34:24.095681 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" path="/var/lib/kubelet/pods/6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6/volumes" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.617137 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nv7gh"] Oct 07 00:34:25 crc kubenswrapper[4791]: E1007 00:34:25.617508 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerName="registry-server" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.617526 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerName="registry-server" Oct 07 00:34:25 crc kubenswrapper[4791]: E1007 00:34:25.617539 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerName="extract-utilities" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.617547 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerName="extract-utilities" Oct 07 00:34:25 crc kubenswrapper[4791]: E1007 00:34:25.617563 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerName="extract-content" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.617571 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerName="extract-content" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.617699 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="6be0d1f9-c53e-4fc7-9bd9-817fb0f29eb6" containerName="registry-server" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.618205 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.625603 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.625941 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.626111 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.626266 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.626630 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.626880 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.627010 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-j7qj2" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.636866 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nv7gh"] Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.761048 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.761106 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.761145 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chsn7\" (UniqueName: \"kubernetes.io/projected/4f73f408-67a7-4510-9ddb-a289ad131d4c-kube-api-access-chsn7\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.761186 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-config\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.761237 4791 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.761396 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.761486 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-users\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.863082 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.863148 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.863179 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chsn7\" (UniqueName: \"kubernetes.io/projected/4f73f408-67a7-4510-9ddb-a289ad131d4c-kube-api-access-chsn7\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.863219 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-config\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.863255 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc 
kubenswrapper[4791]: I1007 00:34:25.863285 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.863319 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-users\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.864571 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-config\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.869317 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.869382 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.870048 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.870282 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-users\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.880857 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chsn7\" (UniqueName: \"kubernetes.io/projected/4f73f408-67a7-4510-9ddb-a289ad131d4c-kube-api-access-chsn7\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.882368 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-nv7gh\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:25 crc kubenswrapper[4791]: I1007 00:34:25.936831 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:34:26 crc kubenswrapper[4791]: I1007 00:34:26.133693 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nv7gh"] Oct 07 00:34:26 crc kubenswrapper[4791]: W1007 00:34:26.141187 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f73f408_67a7_4510_9ddb_a289ad131d4c.slice/crio-9c0716130141bbb36a290595f8279d8b229a41a25d2ac145439899c6941caa23 WatchSource:0}: Error finding container 9c0716130141bbb36a290595f8279d8b229a41a25d2ac145439899c6941caa23: Status 404 returned error can't find the container with id 9c0716130141bbb36a290595f8279d8b229a41a25d2ac145439899c6941caa23 Oct 07 00:34:26 crc kubenswrapper[4791]: I1007 00:34:26.247829 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" event={"ID":"4f73f408-67a7-4510-9ddb-a289ad131d4c","Type":"ContainerStarted","Data":"9c0716130141bbb36a290595f8279d8b229a41a25d2ac145439899c6941caa23"} Oct 07 00:34:31 crc kubenswrapper[4791]: I1007 00:34:31.280655 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" event={"ID":"4f73f408-67a7-4510-9ddb-a289ad131d4c","Type":"ContainerStarted","Data":"e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb"} Oct 07 00:34:31 crc kubenswrapper[4791]: I1007 00:34:31.301928 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" podStartSLOduration=1.537382622 podStartE2EDuration="6.301909371s" podCreationTimestamp="2025-10-07 00:34:25 +0000 UTC" firstStartedPulling="2025-10-07 00:34:26.143001536 +0000 UTC m=+1392.738939187" lastFinishedPulling="2025-10-07 00:34:30.907528285 +0000 UTC m=+1397.503465936" observedRunningTime="2025-10-07 00:34:31.298144903 +0000 UTC m=+1397.894082554" watchObservedRunningTime="2025-10-07 00:34:31.301909371 +0000 UTC m=+1397.897847022" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.552634 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.554616 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.556555 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.556597 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.557131 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.558059 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.558551 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.559209 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-dm7r2" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.559801 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.560129 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.595213 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691538 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691632 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-14a73042-7109-4d3b-b578-f5b2411825bc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14a73042-7109-4d3b-b578-f5b2411825bc\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691665 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-config-out\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691689 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-config\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691709 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: 
\"kubernetes.io/configmap/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691746 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-tls-assets\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691764 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691783 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnqkc\" (UniqueName: \"kubernetes.io/projected/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-kube-api-access-rnqkc\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.691809 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.692044 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-web-config\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.792966 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-14a73042-7109-4d3b-b578-f5b2411825bc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14a73042-7109-4d3b-b578-f5b2411825bc\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793035 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-config-out\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793063 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-config\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793086 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793131 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-tls-assets\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793158 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793176 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnqkc\" (UniqueName: \"kubernetes.io/projected/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-kube-api-access-rnqkc\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793196 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793673 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-web-config\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.793710 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: E1007 00:34:35.793727 4791 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Oct 07 00:34:35 crc kubenswrapper[4791]: E1007 00:34:35.793866 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls podName:ffe9e3fe-d9ab-478e-949f-0c8554bdf743 nodeName:}" failed. No retries permitted until 2025-10-07 00:34:36.293833751 +0000 UTC m=+1402.889771442 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "ffe9e3fe-d9ab-478e-949f-0c8554bdf743") : secret "default-prometheus-proxy-tls" not found Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.794634 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.796376 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.798183 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.798263 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-14a73042-7109-4d3b-b578-f5b2411825bc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14a73042-7109-4d3b-b578-f5b2411825bc\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bc2dbd8191b0f8c93ca48104c1741dff3078ce410fa5f5a2fb666e1494551b65/globalmount\"" pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.800528 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-config-out\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.801521 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.801563 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-config\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.808797 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-tls-assets\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.823576 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" 
(UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-web-config\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.826672 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnqkc\" (UniqueName: \"kubernetes.io/projected/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-kube-api-access-rnqkc\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:35 crc kubenswrapper[4791]: I1007 00:34:35.839739 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-14a73042-7109-4d3b-b578-f5b2411825bc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14a73042-7109-4d3b-b578-f5b2411825bc\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:36 crc kubenswrapper[4791]: I1007 00:34:36.302336 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:36 crc kubenswrapper[4791]: E1007 00:34:36.302675 4791 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Oct 07 00:34:36 crc kubenswrapper[4791]: E1007 00:34:36.302771 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls podName:ffe9e3fe-d9ab-478e-949f-0c8554bdf743 nodeName:}" failed. No retries permitted until 2025-10-07 00:34:37.302741034 +0000 UTC m=+1403.898678715 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "ffe9e3fe-d9ab-478e-949f-0c8554bdf743") : secret "default-prometheus-proxy-tls" not found Oct 07 00:34:37 crc kubenswrapper[4791]: I1007 00:34:37.316571 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:37 crc kubenswrapper[4791]: I1007 00:34:37.323420 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffe9e3fe-d9ab-478e-949f-0c8554bdf743-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"ffe9e3fe-d9ab-478e-949f-0c8554bdf743\") " pod="service-telemetry/prometheus-default-0" Oct 07 00:34:37 crc kubenswrapper[4791]: I1007 00:34:37.386189 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Oct 07 00:34:37 crc kubenswrapper[4791]: I1007 00:34:37.797320 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Oct 07 00:34:38 crc kubenswrapper[4791]: I1007 00:34:38.324017 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"ffe9e3fe-d9ab-478e-949f-0c8554bdf743","Type":"ContainerStarted","Data":"9350a8834f8e8cf953c7129d1f103377c2d4d540b24939eeae358092ac3549f6"} Oct 07 00:34:42 crc kubenswrapper[4791]: I1007 00:34:42.355513 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"ffe9e3fe-d9ab-478e-949f-0c8554bdf743","Type":"ContainerStarted","Data":"72a8b15150314b88a4de53f4b6193aadef14ead57342c2b333a82d973726f032"} Oct 07 00:34:45 crc kubenswrapper[4791]: I1007 00:34:45.173914 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-kj4sf"] Oct 07 00:34:45 crc kubenswrapper[4791]: I1007 00:34:45.175524 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-6856cfb745-kj4sf" Oct 07 00:34:45 crc kubenswrapper[4791]: I1007 00:34:45.187149 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-kj4sf"] Oct 07 00:34:45 crc kubenswrapper[4791]: I1007 00:34:45.323114 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v547q\" (UniqueName: \"kubernetes.io/projected/4d373495-5026-4d85-af77-e8e11f853bf2-kube-api-access-v547q\") pod \"default-snmp-webhook-6856cfb745-kj4sf\" (UID: \"4d373495-5026-4d85-af77-e8e11f853bf2\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-kj4sf" Oct 07 00:34:45 crc kubenswrapper[4791]: I1007 00:34:45.424211 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v547q\" (UniqueName: \"kubernetes.io/projected/4d373495-5026-4d85-af77-e8e11f853bf2-kube-api-access-v547q\") pod \"default-snmp-webhook-6856cfb745-kj4sf\" (UID: \"4d373495-5026-4d85-af77-e8e11f853bf2\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-kj4sf" Oct 07 00:34:45 crc kubenswrapper[4791]: I1007 00:34:45.442889 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v547q\" (UniqueName: \"kubernetes.io/projected/4d373495-5026-4d85-af77-e8e11f853bf2-kube-api-access-v547q\") pod \"default-snmp-webhook-6856cfb745-kj4sf\" (UID: \"4d373495-5026-4d85-af77-e8e11f853bf2\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-kj4sf" Oct 07 00:34:45 crc kubenswrapper[4791]: I1007 00:34:45.498192 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-snmp-webhook-6856cfb745-kj4sf" Oct 07 00:34:45 crc kubenswrapper[4791]: I1007 00:34:45.937882 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-kj4sf"] Oct 07 00:34:46 crc kubenswrapper[4791]: I1007 00:34:46.383447 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-6856cfb745-kj4sf" event={"ID":"4d373495-5026-4d85-af77-e8e11f853bf2","Type":"ContainerStarted","Data":"fcd5bf6eea5ccdadcd1b0ae0a579b804220dd90d3354ba9eda8b20d1addc8f45"} Oct 07 00:34:48 crc kubenswrapper[4791]: I1007 00:34:48.403343 4791 generic.go:334] "Generic (PLEG): container finished" podID="ffe9e3fe-d9ab-478e-949f-0c8554bdf743" containerID="72a8b15150314b88a4de53f4b6193aadef14ead57342c2b333a82d973726f032" exitCode=0 Oct 07 00:34:48 crc kubenswrapper[4791]: I1007 00:34:48.403654 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"ffe9e3fe-d9ab-478e-949f-0c8554bdf743","Type":"ContainerDied","Data":"72a8b15150314b88a4de53f4b6193aadef14ead57342c2b333a82d973726f032"} Oct 07 00:34:48 crc kubenswrapper[4791]: I1007 00:34:48.999646 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.001392 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.005153 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.005325 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.005509 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-mb8vz" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.005600 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.009624 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.028956 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.073290 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/123abef3-fcea-4f90-9058-7dccb2de989a-tls-assets\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.073332 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/123abef3-fcea-4f90-9058-7dccb2de989a-config-out\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.073368 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.073392 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c53f3e89-6db7-4025-ae76-3999f5f57e9c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c53f3e89-6db7-4025-ae76-3999f5f57e9c\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.073422 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhtqv\" (UniqueName: \"kubernetes.io/projected/123abef3-fcea-4f90-9058-7dccb2de989a-kube-api-access-bhtqv\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.073439 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-config-volume\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.073471 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.073493 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-web-config\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.174133 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.174198 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-web-config\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.174243 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/123abef3-fcea-4f90-9058-7dccb2de989a-tls-assets\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " 
pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.174264 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/123abef3-fcea-4f90-9058-7dccb2de989a-config-out\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.174289 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.174310 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c53f3e89-6db7-4025-ae76-3999f5f57e9c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c53f3e89-6db7-4025-ae76-3999f5f57e9c\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.174334 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhtqv\" (UniqueName: \"kubernetes.io/projected/123abef3-fcea-4f90-9058-7dccb2de989a-kube-api-access-bhtqv\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.174362 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-config-volume\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: E1007 00:34:49.175039 4791 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Oct 07 00:34:49 crc kubenswrapper[4791]: E1007 00:34:49.175141 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls podName:123abef3-fcea-4f90-9058-7dccb2de989a nodeName:}" failed. No retries permitted until 2025-10-07 00:34:49.67511274 +0000 UTC m=+1416.271050481 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "123abef3-fcea-4f90-9058-7dccb2de989a") : secret "default-alertmanager-proxy-tls" not found Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.178958 4791 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.179020 4791 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c53f3e89-6db7-4025-ae76-3999f5f57e9c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c53f3e89-6db7-4025-ae76-3999f5f57e9c\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d547330cdc685abf0abe105bb5daa52a85ef31ec05ff640113942cc642093bc4/globalmount\"" pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.181482 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.188659 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/123abef3-fcea-4f90-9058-7dccb2de989a-config-out\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.188820 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/123abef3-fcea-4f90-9058-7dccb2de989a-tls-assets\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.189036 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-web-config\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.189802 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-config-volume\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.202973 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c53f3e89-6db7-4025-ae76-3999f5f57e9c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c53f3e89-6db7-4025-ae76-3999f5f57e9c\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.203734 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhtqv\" (UniqueName: \"kubernetes.io/projected/123abef3-fcea-4f90-9058-7dccb2de989a-kube-api-access-bhtqv\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: I1007 00:34:49.682804 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:49 crc kubenswrapper[4791]: E1007 00:34:49.683149 4791 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Oct 07 00:34:49 crc kubenswrapper[4791]: E1007 00:34:49.683227 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls podName:123abef3-fcea-4f90-9058-7dccb2de989a nodeName:}" failed. No retries permitted until 2025-10-07 00:34:50.6832074 +0000 UTC m=+1417.279145051 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "123abef3-fcea-4f90-9058-7dccb2de989a") : secret "default-alertmanager-proxy-tls" not found Oct 07 00:34:50 crc kubenswrapper[4791]: I1007 00:34:50.696209 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:50 crc kubenswrapper[4791]: E1007 00:34:50.696342 4791 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Oct 07 00:34:50 crc kubenswrapper[4791]: E1007 00:34:50.696468 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls podName:123abef3-fcea-4f90-9058-7dccb2de989a nodeName:}" failed. No retries permitted until 2025-10-07 00:34:52.696453974 +0000 UTC m=+1419.292391625 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "123abef3-fcea-4f90-9058-7dccb2de989a") : secret "default-alertmanager-proxy-tls" not found Oct 07 00:34:52 crc kubenswrapper[4791]: I1007 00:34:52.727672 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:52 crc kubenswrapper[4791]: I1007 00:34:52.736839 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/123abef3-fcea-4f90-9058-7dccb2de989a-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"123abef3-fcea-4f90-9058-7dccb2de989a\") " pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:52 crc kubenswrapper[4791]: I1007 00:34:52.930206 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Oct 07 00:34:53 crc kubenswrapper[4791]: I1007 00:34:53.146981 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Oct 07 00:34:53 crc kubenswrapper[4791]: I1007 00:34:53.471304 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-6856cfb745-kj4sf" event={"ID":"4d373495-5026-4d85-af77-e8e11f853bf2","Type":"ContainerStarted","Data":"dbc3a35d74b4cf1ccf43d24a89f6131e7123a62d872fefc86be01acb5fe9cf11"} Oct 07 00:34:53 crc kubenswrapper[4791]: I1007 00:34:53.475502 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"123abef3-fcea-4f90-9058-7dccb2de989a","Type":"ContainerStarted","Data":"dfcbfda1ff449ffc6b264281bc7a28561583f2d1c4c7ed2108a4f681be5ce006"} Oct 07 00:34:53 crc kubenswrapper[4791]: I1007 00:34:53.489253 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-snmp-webhook-6856cfb745-kj4sf" podStartSLOduration=1.8305160969999998 podStartE2EDuration="8.489239076s" podCreationTimestamp="2025-10-07 00:34:45 +0000 UTC" firstStartedPulling="2025-10-07 00:34:45.950948143 +0000 UTC m=+1412.546885784" lastFinishedPulling="2025-10-07 00:34:52.609671112 +0000 UTC m=+1419.205608763" observedRunningTime="2025-10-07 00:34:53.48695618 +0000 UTC m=+1420.082893831" watchObservedRunningTime="2025-10-07 00:34:53.489239076 +0000 UTC m=+1420.085176727" Oct 07 00:34:55 crc kubenswrapper[4791]: I1007 00:34:55.490707 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"123abef3-fcea-4f90-9058-7dccb2de989a","Type":"ContainerStarted","Data":"25b3de6f410a10296ab72225c005c438a9ab224bec0c04977dab72267948ec03"} Oct 07 00:34:56 crc kubenswrapper[4791]: I1007 00:34:56.498882 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"ffe9e3fe-d9ab-478e-949f-0c8554bdf743","Type":"ContainerStarted","Data":"d05ca093456ee98762e544e356937227d758072294aa9d064ec4ee29b0fbaa4d"} Oct 07 00:34:58 crc kubenswrapper[4791]: I1007 00:34:58.516627 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"ffe9e3fe-d9ab-478e-949f-0c8554bdf743","Type":"ContainerStarted","Data":"519f8e3b0fab0016f54503f7ae2d18c9b526cbaa6bb885731478b8313d121691"} Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.311816 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th"] Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.313652 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.316762 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.317475 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.319617 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-z6ql5" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.320465 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.332823 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th"] Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.367294 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/3a91f4b9-8355-444b-9ba0-d4ceab19244b-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.367463 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jdbt\" (UniqueName: \"kubernetes.io/projected/3a91f4b9-8355-444b-9ba0-d4ceab19244b-kube-api-access-8jdbt\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.367513 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.367580 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/3a91f4b9-8355-444b-9ba0-d4ceab19244b-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.367617 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.468168 4791 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.468243 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/3a91f4b9-8355-444b-9ba0-d4ceab19244b-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.468295 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jdbt\" (UniqueName: \"kubernetes.io/projected/3a91f4b9-8355-444b-9ba0-d4ceab19244b-kube-api-access-8jdbt\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.468320 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.468343 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/3a91f4b9-8355-444b-9ba0-d4ceab19244b-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: E1007 00:35:01.468379 4791 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Oct 07 00:35:01 crc kubenswrapper[4791]: E1007 00:35:01.468495 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls podName:3a91f4b9-8355-444b-9ba0-d4ceab19244b nodeName:}" failed. No retries permitted until 2025-10-07 00:35:01.968470417 +0000 UTC m=+1428.564408158 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" (UID: "3a91f4b9-8355-444b-9ba0-d4ceab19244b") : secret "default-cloud1-coll-meter-proxy-tls" not found Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.468743 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/3a91f4b9-8355-444b-9ba0-d4ceab19244b-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.469197 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/3a91f4b9-8355-444b-9ba0-d4ceab19244b-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.476342 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.487074 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jdbt\" (UniqueName: \"kubernetes.io/projected/3a91f4b9-8355-444b-9ba0-d4ceab19244b-kube-api-access-8jdbt\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: I1007 00:35:01.976335 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:01 crc kubenswrapper[4791]: E1007 00:35:01.976561 4791 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Oct 07 00:35:01 crc kubenswrapper[4791]: E1007 00:35:01.976651 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls podName:3a91f4b9-8355-444b-9ba0-d4ceab19244b nodeName:}" failed. No retries permitted until 2025-10-07 00:35:02.976631337 +0000 UTC m=+1429.572568988 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" (UID: "3a91f4b9-8355-444b-9ba0-d4ceab19244b") : secret "default-cloud1-coll-meter-proxy-tls" not found Oct 07 00:35:02 crc kubenswrapper[4791]: I1007 00:35:02.548141 4791 generic.go:334] "Generic (PLEG): container finished" podID="123abef3-fcea-4f90-9058-7dccb2de989a" containerID="25b3de6f410a10296ab72225c005c438a9ab224bec0c04977dab72267948ec03" exitCode=0 Oct 07 00:35:02 crc kubenswrapper[4791]: I1007 00:35:02.548348 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"123abef3-fcea-4f90-9058-7dccb2de989a","Type":"ContainerDied","Data":"25b3de6f410a10296ab72225c005c438a9ab224bec0c04977dab72267948ec03"} Oct 07 00:35:02 crc kubenswrapper[4791]: I1007 00:35:02.991325 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:02 crc kubenswrapper[4791]: I1007 00:35:02.998166 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a91f4b9-8355-444b-9ba0-d4ceab19244b-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th\" (UID: \"3a91f4b9-8355-444b-9ba0-d4ceab19244b\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:03 crc kubenswrapper[4791]: I1007 00:35:03.188512 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" Oct 07 00:35:03 crc kubenswrapper[4791]: I1007 00:35:03.841132 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts"] Oct 07 00:35:03 crc kubenswrapper[4791]: I1007 00:35:03.842794 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:03 crc kubenswrapper[4791]: I1007 00:35:03.848607 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts"] Oct 07 00:35:03 crc kubenswrapper[4791]: I1007 00:35:03.849769 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Oct 07 00:35:03 crc kubenswrapper[4791]: I1007 00:35:03.849974 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.006421 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.006484 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/82f12631-923c-4251-9fa8-ff7358158c78-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.006554 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5295p\" (UniqueName: \"kubernetes.io/projected/82f12631-923c-4251-9fa8-ff7358158c78-kube-api-access-5295p\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.006577 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/82f12631-923c-4251-9fa8-ff7358158c78-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.006607 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.108086 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5295p\" (UniqueName: \"kubernetes.io/projected/82f12631-923c-4251-9fa8-ff7358158c78-kube-api-access-5295p\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 
00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.108152 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/82f12631-923c-4251-9fa8-ff7358158c78-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.108200 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.108239 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.108309 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/82f12631-923c-4251-9fa8-ff7358158c78-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.108721 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/82f12631-923c-4251-9fa8-ff7358158c78-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: E1007 00:35:04.110372 4791 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Oct 07 00:35:04 crc kubenswrapper[4791]: E1007 00:35:04.110496 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls podName:82f12631-923c-4251-9fa8-ff7358158c78 nodeName:}" failed. No retries permitted until 2025-10-07 00:35:04.610471288 +0000 UTC m=+1431.206408999 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" (UID: "82f12631-923c-4251-9fa8-ff7358158c78") : secret "default-cloud1-ceil-meter-proxy-tls" not found Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.112065 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/82f12631-923c-4251-9fa8-ff7358158c78-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.116473 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.127829 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5295p\" (UniqueName: \"kubernetes.io/projected/82f12631-923c-4251-9fa8-ff7358158c78-kube-api-access-5295p\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.615801 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:04 crc kubenswrapper[4791]: E1007 00:35:04.616096 4791 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Oct 07 00:35:04 crc kubenswrapper[4791]: E1007 00:35:04.616163 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls podName:82f12631-923c-4251-9fa8-ff7358158c78 nodeName:}" failed. No retries permitted until 2025-10-07 00:35:05.616147596 +0000 UTC m=+1432.212085247 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" (UID: "82f12631-923c-4251-9fa8-ff7358158c78") : secret "default-cloud1-ceil-meter-proxy-tls" not found Oct 07 00:35:04 crc kubenswrapper[4791]: I1007 00:35:04.794057 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th"] Oct 07 00:35:04 crc kubenswrapper[4791]: W1007 00:35:04.808221 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a91f4b9_8355_444b_9ba0_d4ceab19244b.slice/crio-d5aa4a8a9ae028fb0348e15dbd2259d2aee1493b36baea519e1668470c7a42d5 WatchSource:0}: Error finding container d5aa4a8a9ae028fb0348e15dbd2259d2aee1493b36baea519e1668470c7a42d5: Status 404 returned error can't find the container with id d5aa4a8a9ae028fb0348e15dbd2259d2aee1493b36baea519e1668470c7a42d5 Oct 07 00:35:05 crc kubenswrapper[4791]: I1007 00:35:05.572091 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"ffe9e3fe-d9ab-478e-949f-0c8554bdf743","Type":"ContainerStarted","Data":"8ac09f14ee16dcb11ef46cc653067d97648d5908fbb3fcf190f3138b74dd8a09"} Oct 07 00:35:05 crc kubenswrapper[4791]: I1007 00:35:05.588315 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" event={"ID":"3a91f4b9-8355-444b-9ba0-d4ceab19244b","Type":"ContainerStarted","Data":"d5aa4a8a9ae028fb0348e15dbd2259d2aee1493b36baea519e1668470c7a42d5"} Oct 07 00:35:05 crc kubenswrapper[4791]: I1007 00:35:05.601355 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.738796904 podStartE2EDuration="31.601334967s" podCreationTimestamp="2025-10-07 00:34:34 +0000 UTC" firstStartedPulling="2025-10-07 00:34:37.804381629 +0000 UTC m=+1404.400319280" lastFinishedPulling="2025-10-07 00:35:04.666919692 +0000 UTC m=+1431.262857343" observedRunningTime="2025-10-07 00:35:05.599766192 +0000 UTC m=+1432.195703833" watchObservedRunningTime="2025-10-07 00:35:05.601334967 +0000 UTC m=+1432.197272618" Oct 07 00:35:05 crc kubenswrapper[4791]: I1007 00:35:05.643460 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:05 crc kubenswrapper[4791]: I1007 00:35:05.649169 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/82f12631-923c-4251-9fa8-ff7358158c78-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-86mts\" (UID: \"82f12631-923c-4251-9fa8-ff7358158c78\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:05 crc kubenswrapper[4791]: I1007 00:35:05.669794 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" Oct 07 00:35:06 crc kubenswrapper[4791]: I1007 00:35:06.261643 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts"] Oct 07 00:35:06 crc kubenswrapper[4791]: I1007 00:35:06.595854 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" event={"ID":"82f12631-923c-4251-9fa8-ff7358158c78","Type":"ContainerStarted","Data":"0b0f142209bf4346e6b3087417ed7c2629a8caaa27dc45203bd3511303b84c52"} Oct 07 00:35:06 crc kubenswrapper[4791]: I1007 00:35:06.598082 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"123abef3-fcea-4f90-9058-7dccb2de989a","Type":"ContainerStarted","Data":"faf3a5db666d2cdbea8807ac22ad3463813e283f7db397fa8ba60f289b2be38d"} Oct 07 00:35:06 crc kubenswrapper[4791]: I1007 00:35:06.600281 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" event={"ID":"3a91f4b9-8355-444b-9ba0-d4ceab19244b","Type":"ContainerStarted","Data":"482f4b6640e9a8cebeb9348c7499ee7a12c384ae03ecad94a85bd49255691d4c"} Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.386427 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.386477 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.433854 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.613555 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" event={"ID":"82f12631-923c-4251-9fa8-ff7358158c78","Type":"ContainerStarted","Data":"1390ac2650004b2ce75e884afa4bad2aac028733d188e6ad9ed28089acf9ada3"} Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.649733 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.745871 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h"] Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.748912 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.753380 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.756007 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.762451 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h"] Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.914541 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r48h\" (UniqueName: \"kubernetes.io/projected/ffbf236c-e7ab-41a2-8083-ff71bb300966-kube-api-access-6r48h\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.914589 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ffbf236c-e7ab-41a2-8083-ff71bb300966-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.914613 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.914632 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:07 crc kubenswrapper[4791]: I1007 00:35:07.914665 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ffbf236c-e7ab-41a2-8083-ff71bb300966-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.016197 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r48h\" (UniqueName: \"kubernetes.io/projected/ffbf236c-e7ab-41a2-8083-ff71bb300966-kube-api-access-6r48h\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc 
kubenswrapper[4791]: I1007 00:35:08.016238 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ffbf236c-e7ab-41a2-8083-ff71bb300966-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.016261 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.016281 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.016307 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ffbf236c-e7ab-41a2-8083-ff71bb300966-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: E1007 00:35:08.016602 4791 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Oct 07 00:35:08 crc kubenswrapper[4791]: E1007 00:35:08.016703 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls podName:ffbf236c-e7ab-41a2-8083-ff71bb300966 nodeName:}" failed. No retries permitted until 2025-10-07 00:35:08.516683224 +0000 UTC m=+1435.112620875 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" (UID: "ffbf236c-e7ab-41a2-8083-ff71bb300966") : secret "default-cloud1-sens-meter-proxy-tls" not found Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.016755 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ffbf236c-e7ab-41a2-8083-ff71bb300966-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.017903 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ffbf236c-e7ab-41a2-8083-ff71bb300966-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.033569 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r48h\" (UniqueName: \"kubernetes.io/projected/ffbf236c-e7ab-41a2-8083-ff71bb300966-kube-api-access-6r48h\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.037784 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.522555 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:08 crc kubenswrapper[4791]: E1007 00:35:08.522769 4791 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Oct 07 00:35:08 crc kubenswrapper[4791]: E1007 00:35:08.522855 4791 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls podName:ffbf236c-e7ab-41a2-8083-ff71bb300966 nodeName:}" failed. No retries permitted until 2025-10-07 00:35:09.522836746 +0000 UTC m=+1436.118774397 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" (UID: "ffbf236c-e7ab-41a2-8083-ff71bb300966") : secret "default-cloud1-sens-meter-proxy-tls" not found Oct 07 00:35:08 crc kubenswrapper[4791]: I1007 00:35:08.628476 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"123abef3-fcea-4f90-9058-7dccb2de989a","Type":"ContainerStarted","Data":"cc2c19d5589fe1e4b9cef544c09bcc1eb262fa17cbf8b1c0bd23c373c0288e97"} Oct 07 00:35:09 crc kubenswrapper[4791]: I1007 00:35:09.536171 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:09 crc kubenswrapper[4791]: I1007 00:35:09.545032 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffbf236c-e7ab-41a2-8083-ff71bb300966-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h\" (UID: \"ffbf236c-e7ab-41a2-8083-ff71bb300966\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:09 crc kubenswrapper[4791]: I1007 00:35:09.576446 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" Oct 07 00:35:09 crc kubenswrapper[4791]: I1007 00:35:09.975479 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h625g"] Oct 07 00:35:09 crc kubenswrapper[4791]: I1007 00:35:09.977010 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:09 crc kubenswrapper[4791]: I1007 00:35:09.985121 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h625g"] Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.143993 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-utilities\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.144088 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-catalog-content\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.144116 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lglcm\" (UniqueName: \"kubernetes.io/projected/cea676fd-a37a-4723-a235-212c3d2a5d32-kube-api-access-lglcm\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.245330 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lglcm\" (UniqueName: \"kubernetes.io/projected/cea676fd-a37a-4723-a235-212c3d2a5d32-kube-api-access-lglcm\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.245438 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-utilities\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.245519 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-catalog-content\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.246089 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-catalog-content\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.246160 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-utilities\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.265074 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lglcm\" (UniqueName: \"kubernetes.io/projected/cea676fd-a37a-4723-a235-212c3d2a5d32-kube-api-access-lglcm\") pod \"redhat-operators-h625g\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:10 crc kubenswrapper[4791]: I1007 00:35:10.302377 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:11 crc kubenswrapper[4791]: I1007 00:35:11.600674 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:35:11 crc kubenswrapper[4791]: I1007 00:35:11.600723 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:35:12 crc kubenswrapper[4791]: I1007 00:35:12.946062 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h625g"] Oct 07 00:35:12 crc kubenswrapper[4791]: W1007 00:35:12.956749 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcea676fd_a37a_4723_a235_212c3d2a5d32.slice/crio-b45a1df0efb9b4b70e9cb2c68b099555cc5f003a54960f02eebd48017baf97cf WatchSource:0}: Error finding container b45a1df0efb9b4b70e9cb2c68b099555cc5f003a54960f02eebd48017baf97cf: Status 404 returned error can't find the container with id b45a1df0efb9b4b70e9cb2c68b099555cc5f003a54960f02eebd48017baf97cf Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.045244 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h"] Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.699098 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"123abef3-fcea-4f90-9058-7dccb2de989a","Type":"ContainerStarted","Data":"19481324a7af49cdae64aed749c2561fec2918087fd98dce9135006222af4853"} Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.717327 4791 generic.go:334] "Generic (PLEG): container finished" podID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerID="7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed" exitCode=0 Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.717763 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h625g" event={"ID":"cea676fd-a37a-4723-a235-212c3d2a5d32","Type":"ContainerDied","Data":"7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed"} Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.717792 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h625g" event={"ID":"cea676fd-a37a-4723-a235-212c3d2a5d32","Type":"ContainerStarted","Data":"b45a1df0efb9b4b70e9cb2c68b099555cc5f003a54960f02eebd48017baf97cf"} Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.746340 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" 
event={"ID":"3a91f4b9-8355-444b-9ba0-d4ceab19244b","Type":"ContainerStarted","Data":"342ad85ff80cd08a6a8025ac2c81e9934bd4c792c3724ae73a88bd74b51381b2"} Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.759128 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=16.689058422 podStartE2EDuration="26.759104839s" podCreationTimestamp="2025-10-07 00:34:47 +0000 UTC" firstStartedPulling="2025-10-07 00:35:02.550741981 +0000 UTC m=+1429.146679622" lastFinishedPulling="2025-10-07 00:35:12.620788388 +0000 UTC m=+1439.216726039" observedRunningTime="2025-10-07 00:35:13.740059579 +0000 UTC m=+1440.335997240" watchObservedRunningTime="2025-10-07 00:35:13.759104839 +0000 UTC m=+1440.355042490" Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.766936 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" event={"ID":"82f12631-923c-4251-9fa8-ff7358158c78","Type":"ContainerStarted","Data":"4291c42ee992ff449f8b2c4edaeb8a1a6d2312147d95e7e5d3d5730069e6e52b"} Oct 07 00:35:13 crc kubenswrapper[4791]: I1007 00:35:13.773010 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" event={"ID":"ffbf236c-e7ab-41a2-8083-ff71bb300966","Type":"ContainerStarted","Data":"b038d1c0742c140ac6a81bd7b97b9da1779781fbb88dc2e9314ae1520fc97213"} Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.724986 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj"] Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.726667 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.729889 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.734708 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj"] Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.735017 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.788347 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" event={"ID":"ffbf236c-e7ab-41a2-8083-ff71bb300966","Type":"ContainerStarted","Data":"c3e991af64ae81a293bf7b39fcedac51d63fe3a2c298a141a79987b38cd0ea31"} Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.788388 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" event={"ID":"ffbf236c-e7ab-41a2-8083-ff71bb300966","Type":"ContainerStarted","Data":"0cc78c686c66cdf185ca1aa941ca1092a74600bb68f0e2c7b1d45e739e821161"} Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.818741 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79dg4\" (UniqueName: \"kubernetes.io/projected/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-kube-api-access-79dg4\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.818850 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.818915 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.818985 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.920457 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-socket-dir\") pod 
\"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.920555 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.920611 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.920645 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79dg4\" (UniqueName: \"kubernetes.io/projected/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-kube-api-access-79dg4\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.921664 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.922183 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.931340 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:14 crc kubenswrapper[4791]: I1007 00:35:14.942352 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79dg4\" (UniqueName: \"kubernetes.io/projected/62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37-kube-api-access-79dg4\") pod \"default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj\" (UID: \"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:15 crc kubenswrapper[4791]: I1007 00:35:15.046502 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" Oct 07 00:35:15 crc kubenswrapper[4791]: I1007 00:35:15.632540 4791 scope.go:117] "RemoveContainer" containerID="81999fb1d0e5bdad043dd8ff05c313a90ea7c208a10e50490a651b23083c93d7" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.184493 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv"] Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.188485 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.192685 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.193858 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv"] Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.345829 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.345908 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.345973 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v958\" (UniqueName: \"kubernetes.io/projected/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-kube-api-access-2v958\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.346044 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.447673 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.447840 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"elastic-certs\" (UniqueName: \"kubernetes.io/secret/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.447947 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.448002 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2v958\" (UniqueName: \"kubernetes.io/projected/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-kube-api-access-2v958\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.448174 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.449026 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.453271 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.466039 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v958\" (UniqueName: \"kubernetes.io/projected/e03dece7-b0aa-4a45-a1bd-fbd6f7377006-kube-api-access-2v958\") pod \"default-cloud1-ceil-event-smartgateway-f549b7776-ktssv\" (UID: \"e03dece7-b0aa-4a45-a1bd-fbd6f7377006\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.518580 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" Oct 07 00:35:16 crc kubenswrapper[4791]: I1007 00:35:16.803159 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h625g" event={"ID":"cea676fd-a37a-4723-a235-212c3d2a5d32","Type":"ContainerStarted","Data":"0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339"} Oct 07 00:35:17 crc kubenswrapper[4791]: I1007 00:35:17.813180 4791 generic.go:334] "Generic (PLEG): container finished" podID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerID="0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339" exitCode=0 Oct 07 00:35:17 crc kubenswrapper[4791]: I1007 00:35:17.813244 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h625g" event={"ID":"cea676fd-a37a-4723-a235-212c3d2a5d32","Type":"ContainerDied","Data":"0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339"} Oct 07 00:35:24 crc kubenswrapper[4791]: I1007 00:35:24.887361 4791 scope.go:117] "RemoveContainer" containerID="b62955ee651d0a12a61f29a6a95303cac46ef05377e1ec1f1f46e6b54eb6eaa0" Oct 07 00:35:25 crc kubenswrapper[4791]: I1007 00:35:25.302966 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv"] Oct 07 00:35:25 crc kubenswrapper[4791]: W1007 00:35:25.307573 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode03dece7_b0aa_4a45_a1bd_fbd6f7377006.slice/crio-2a738e4dc1ce5ee2de705f955a762a2f92f3015c0b685042922f8d70520378b0 WatchSource:0}: Error finding container 2a738e4dc1ce5ee2de705f955a762a2f92f3015c0b685042922f8d70520378b0: Status 404 returned error can't find the container with id 2a738e4dc1ce5ee2de705f955a762a2f92f3015c0b685042922f8d70520378b0 Oct 07 00:35:25 crc kubenswrapper[4791]: I1007 00:35:25.364013 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj"] Oct 07 00:35:25 crc kubenswrapper[4791]: W1007 00:35:25.368165 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62d61f8f_8f11_48b3_b3d5_ecbdcdb2ba37.slice/crio-1a217a20ee0eecfa92b3f255e9deda279dfb9e747a3c0b390659ad744cad9699 WatchSource:0}: Error finding container 1a217a20ee0eecfa92b3f255e9deda279dfb9e747a3c0b390659ad744cad9699: Status 404 returned error can't find the container with id 1a217a20ee0eecfa92b3f255e9deda279dfb9e747a3c0b390659ad744cad9699 Oct 07 00:35:25 crc kubenswrapper[4791]: E1007 00:35:25.831144 4791 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest" Oct 07 00:35:25 crc kubenswrapper[4791]: E1007 00:35:25.831292 4791 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,Command:[],Args:[-config 
/etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:prom-https,HostPort:0,ContainerPort:8083,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8jdbt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th_service-telemetry(3a91f4b9-8355-444b-9ba0-d4ceab19244b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 00:35:25 crc kubenswrapper[4791]: E1007 00:35:25.832361 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" podUID="3a91f4b9-8355-444b-9ba0-d4ceab19244b" Oct 07 00:35:25 crc kubenswrapper[4791]: I1007 00:35:25.886057 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" event={"ID":"e03dece7-b0aa-4a45-a1bd-fbd6f7377006","Type":"ContainerStarted","Data":"997df4864d14cb93e8b6645d99ae9f5a946fae7661f13cbb70c7f3f62e182b52"} Oct 07 00:35:25 crc kubenswrapper[4791]: I1007 00:35:25.886294 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" event={"ID":"e03dece7-b0aa-4a45-a1bd-fbd6f7377006","Type":"ContainerStarted","Data":"2a738e4dc1ce5ee2de705f955a762a2f92f3015c0b685042922f8d70520378b0"} Oct 07 00:35:25 crc kubenswrapper[4791]: I1007 00:35:25.891474 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" event={"ID":"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37","Type":"ContainerStarted","Data":"82e08184e215d92667a902ace2862be4ef9e15ea55bf274d3b9a9776aed08a26"} Oct 07 00:35:25 crc kubenswrapper[4791]: I1007 00:35:25.892624 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" 
event={"ID":"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37","Type":"ContainerStarted","Data":"1a217a20ee0eecfa92b3f255e9deda279dfb9e747a3c0b390659ad744cad9699"} Oct 07 00:35:25 crc kubenswrapper[4791]: E1007 00:35:25.897611 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" podUID="3a91f4b9-8355-444b-9ba0-d4ceab19244b" Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.898425 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" event={"ID":"82f12631-923c-4251-9fa8-ff7358158c78","Type":"ContainerStarted","Data":"307ffc35efce01b1408ce94a72a5b51f62d95faca73e4d2f152b114a55013bb6"} Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.899881 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" event={"ID":"e03dece7-b0aa-4a45-a1bd-fbd6f7377006","Type":"ContainerStarted","Data":"9d6b198b3931904e688eae7f071e7633649fc53bdd16fbd63e649e38bd34c390"} Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.901979 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" event={"ID":"ffbf236c-e7ab-41a2-8083-ff71bb300966","Type":"ContainerStarted","Data":"7665d58d7c8b07cb04020659b682ff287b9ffcba615251cb0f17001120544f33"} Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.905626 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h625g" event={"ID":"cea676fd-a37a-4723-a235-212c3d2a5d32","Type":"ContainerStarted","Data":"7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe"} Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.907790 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" event={"ID":"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37","Type":"ContainerStarted","Data":"500f0774f69da98d9ac7a59f97da58367c6b8f225955754e146fa4dabae7d073"} Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.919322 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" podStartSLOduration=4.542648141 podStartE2EDuration="23.919302186s" podCreationTimestamp="2025-10-07 00:35:03 +0000 UTC" firstStartedPulling="2025-10-07 00:35:06.30807048 +0000 UTC m=+1432.904008121" lastFinishedPulling="2025-10-07 00:35:25.684724515 +0000 UTC m=+1452.280662166" observedRunningTime="2025-10-07 00:35:26.916712521 +0000 UTC m=+1453.512650182" watchObservedRunningTime="2025-10-07 00:35:26.919302186 +0000 UTC m=+1453.515239837" Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.940740 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" podStartSLOduration=12.352964516 podStartE2EDuration="12.940720674s" podCreationTimestamp="2025-10-07 00:35:14 +0000 UTC" firstStartedPulling="2025-10-07 00:35:25.371623136 +0000 UTC m=+1451.967560787" lastFinishedPulling="2025-10-07 00:35:25.959379284 +0000 UTC m=+1452.555316945" observedRunningTime="2025-10-07 00:35:26.937767329 +0000 UTC m=+1453.533704980" 
watchObservedRunningTime="2025-10-07 00:35:26.940720674 +0000 UTC m=+1453.536658325" Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.967033 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" podStartSLOduration=10.334139293 podStartE2EDuration="10.967011533s" podCreationTimestamp="2025-10-07 00:35:16 +0000 UTC" firstStartedPulling="2025-10-07 00:35:25.310247624 +0000 UTC m=+1451.906185275" lastFinishedPulling="2025-10-07 00:35:25.943119864 +0000 UTC m=+1452.539057515" observedRunningTime="2025-10-07 00:35:26.95409545 +0000 UTC m=+1453.550033101" watchObservedRunningTime="2025-10-07 00:35:26.967011533 +0000 UTC m=+1453.562949184" Oct 07 00:35:26 crc kubenswrapper[4791]: I1007 00:35:26.976317 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h625g" podStartSLOduration=5.89634284 podStartE2EDuration="17.976301001s" podCreationTimestamp="2025-10-07 00:35:09 +0000 UTC" firstStartedPulling="2025-10-07 00:35:13.727709383 +0000 UTC m=+1440.323647034" lastFinishedPulling="2025-10-07 00:35:25.807667544 +0000 UTC m=+1452.403605195" observedRunningTime="2025-10-07 00:35:26.975960491 +0000 UTC m=+1453.571898152" watchObservedRunningTime="2025-10-07 00:35:26.976301001 +0000 UTC m=+1453.572238652" Oct 07 00:35:27 crc kubenswrapper[4791]: I1007 00:35:27.006650 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" podStartSLOduration=7.304054323 podStartE2EDuration="20.006632097s" podCreationTimestamp="2025-10-07 00:35:07 +0000 UTC" firstStartedPulling="2025-10-07 00:35:13.062602623 +0000 UTC m=+1439.658540274" lastFinishedPulling="2025-10-07 00:35:25.765180397 +0000 UTC m=+1452.361118048" observedRunningTime="2025-10-07 00:35:26.999354837 +0000 UTC m=+1453.595292508" watchObservedRunningTime="2025-10-07 00:35:27.006632097 +0000 UTC m=+1453.602569748" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.349719 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nv7gh"] Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.350282 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" podUID="4f73f408-67a7-4510-9ddb-a289ad131d4c" containerName="default-interconnect" containerID="cri-o://e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb" gracePeriod=30 Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.717527 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.816652 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chsn7\" (UniqueName: \"kubernetes.io/projected/4f73f408-67a7-4510-9ddb-a289ad131d4c-kube-api-access-chsn7\") pod \"4f73f408-67a7-4510-9ddb-a289ad131d4c\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.816702 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-ca\") pod \"4f73f408-67a7-4510-9ddb-a289ad131d4c\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.816738 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-credentials\") pod \"4f73f408-67a7-4510-9ddb-a289ad131d4c\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.816762 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-ca\") pod \"4f73f408-67a7-4510-9ddb-a289ad131d4c\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.816784 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-credentials\") pod \"4f73f408-67a7-4510-9ddb-a289ad131d4c\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.816817 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-users\") pod \"4f73f408-67a7-4510-9ddb-a289ad131d4c\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.816881 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-config\") pod \"4f73f408-67a7-4510-9ddb-a289ad131d4c\" (UID: \"4f73f408-67a7-4510-9ddb-a289ad131d4c\") " Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.817974 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "4f73f408-67a7-4510-9ddb-a289ad131d4c" (UID: "4f73f408-67a7-4510-9ddb-a289ad131d4c"). InnerVolumeSpecName "sasl-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.822742 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "4f73f408-67a7-4510-9ddb-a289ad131d4c" (UID: "4f73f408-67a7-4510-9ddb-a289ad131d4c"). InnerVolumeSpecName "sasl-users". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.823351 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "4f73f408-67a7-4510-9ddb-a289ad131d4c" (UID: "4f73f408-67a7-4510-9ddb-a289ad131d4c"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.826654 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "4f73f408-67a7-4510-9ddb-a289ad131d4c" (UID: "4f73f408-67a7-4510-9ddb-a289ad131d4c"). InnerVolumeSpecName "default-interconnect-openstack-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.826679 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "4f73f408-67a7-4510-9ddb-a289ad131d4c" (UID: "4f73f408-67a7-4510-9ddb-a289ad131d4c"). InnerVolumeSpecName "default-interconnect-openstack-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.826753 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "4f73f408-67a7-4510-9ddb-a289ad131d4c" (UID: "4f73f408-67a7-4510-9ddb-a289ad131d4c"). InnerVolumeSpecName "default-interconnect-inter-router-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.826768 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f73f408-67a7-4510-9ddb-a289ad131d4c-kube-api-access-chsn7" (OuterVolumeSpecName: "kube-api-access-chsn7") pod "4f73f408-67a7-4510-9ddb-a289ad131d4c" (UID: "4f73f408-67a7-4510-9ddb-a289ad131d4c"). InnerVolumeSpecName "kube-api-access-chsn7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.917842 4791 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.917871 4791 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.917883 4791 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.917895 4791 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-users\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.917906 4791 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/4f73f408-67a7-4510-9ddb-a289ad131d4c-sasl-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.917916 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chsn7\" (UniqueName: \"kubernetes.io/projected/4f73f408-67a7-4510-9ddb-a289ad131d4c-kube-api-access-chsn7\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.917925 4791 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/4f73f408-67a7-4510-9ddb-a289ad131d4c-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.929948 4791 generic.go:334] "Generic (PLEG): container finished" podID="4f73f408-67a7-4510-9ddb-a289ad131d4c" containerID="e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb" exitCode=0 Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.930010 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" event={"ID":"4f73f408-67a7-4510-9ddb-a289ad131d4c","Type":"ContainerDied","Data":"e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb"} Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.930048 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" event={"ID":"4f73f408-67a7-4510-9ddb-a289ad131d4c","Type":"ContainerDied","Data":"9c0716130141bbb36a290595f8279d8b229a41a25d2ac145439899c6941caa23"} Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.930097 4791 scope.go:117] "RemoveContainer" containerID="e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.930338 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-nv7gh" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.951391 4791 scope.go:117] "RemoveContainer" containerID="e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb" Oct 07 00:35:28 crc kubenswrapper[4791]: E1007 00:35:28.951924 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb\": container with ID starting with e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb not found: ID does not exist" containerID="e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.951963 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb"} err="failed to get container status \"e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb\": rpc error: code = NotFound desc = could not find container \"e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb\": container with ID starting with e2508cd1c67c406a06d20db7a64581b22cc46ce8a27f32744c9db1a227f9bdcb not found: ID does not exist" Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.980087 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nv7gh"] Oct 07 00:35:28 crc kubenswrapper[4791]: I1007 00:35:28.986042 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nv7gh"] Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.938414 4791 generic.go:334] "Generic (PLEG): container finished" podID="82f12631-923c-4251-9fa8-ff7358158c78" containerID="4291c42ee992ff449f8b2c4edaeb8a1a6d2312147d95e7e5d3d5730069e6e52b" exitCode=0 Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.938483 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" event={"ID":"82f12631-923c-4251-9fa8-ff7358158c78","Type":"ContainerDied","Data":"4291c42ee992ff449f8b2c4edaeb8a1a6d2312147d95e7e5d3d5730069e6e52b"} Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.939075 4791 scope.go:117] "RemoveContainer" containerID="4291c42ee992ff449f8b2c4edaeb8a1a6d2312147d95e7e5d3d5730069e6e52b" Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.941383 4791 generic.go:334] "Generic (PLEG): container finished" podID="e03dece7-b0aa-4a45-a1bd-fbd6f7377006" containerID="997df4864d14cb93e8b6645d99ae9f5a946fae7661f13cbb70c7f3f62e182b52" exitCode=0 Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.941466 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" event={"ID":"e03dece7-b0aa-4a45-a1bd-fbd6f7377006","Type":"ContainerDied","Data":"997df4864d14cb93e8b6645d99ae9f5a946fae7661f13cbb70c7f3f62e182b52"} Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.941821 4791 scope.go:117] "RemoveContainer" containerID="997df4864d14cb93e8b6645d99ae9f5a946fae7661f13cbb70c7f3f62e182b52" Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.946289 4791 generic.go:334] "Generic (PLEG): container finished" podID="ffbf236c-e7ab-41a2-8083-ff71bb300966" containerID="0cc78c686c66cdf185ca1aa941ca1092a74600bb68f0e2c7b1d45e739e821161" exitCode=0 Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 
00:35:29.946382 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" event={"ID":"ffbf236c-e7ab-41a2-8083-ff71bb300966","Type":"ContainerDied","Data":"0cc78c686c66cdf185ca1aa941ca1092a74600bb68f0e2c7b1d45e739e821161"} Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.947035 4791 scope.go:117] "RemoveContainer" containerID="0cc78c686c66cdf185ca1aa941ca1092a74600bb68f0e2c7b1d45e739e821161" Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.948944 4791 generic.go:334] "Generic (PLEG): container finished" podID="62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37" containerID="82e08184e215d92667a902ace2862be4ef9e15ea55bf274d3b9a9776aed08a26" exitCode=0 Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.949022 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" event={"ID":"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37","Type":"ContainerDied","Data":"82e08184e215d92667a902ace2862be4ef9e15ea55bf274d3b9a9776aed08a26"} Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.951237 4791 scope.go:117] "RemoveContainer" containerID="82e08184e215d92667a902ace2862be4ef9e15ea55bf274d3b9a9776aed08a26" Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.954862 4791 generic.go:334] "Generic (PLEG): container finished" podID="3a91f4b9-8355-444b-9ba0-d4ceab19244b" containerID="342ad85ff80cd08a6a8025ac2c81e9934bd4c792c3724ae73a88bd74b51381b2" exitCode=0 Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.954974 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" event={"ID":"3a91f4b9-8355-444b-9ba0-d4ceab19244b","Type":"ContainerDied","Data":"342ad85ff80cd08a6a8025ac2c81e9934bd4c792c3724ae73a88bd74b51381b2"} Oct 07 00:35:29 crc kubenswrapper[4791]: I1007 00:35:29.955660 4791 scope.go:117] "RemoveContainer" containerID="342ad85ff80cd08a6a8025ac2c81e9934bd4c792c3724ae73a88bd74b51381b2" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.085393 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f73f408-67a7-4510-9ddb-a289ad131d4c" path="/var/lib/kubelet/pods/4f73f408-67a7-4510-9ddb-a289ad131d4c/volumes" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.302492 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.302553 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.314068 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-lvhbs"] Oct 07 00:35:30 crc kubenswrapper[4791]: E1007 00:35:30.314605 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f73f408-67a7-4510-9ddb-a289ad131d4c" containerName="default-interconnect" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.314677 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f73f408-67a7-4510-9ddb-a289ad131d4c" containerName="default-interconnect" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.314884 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f73f408-67a7-4510-9ddb-a289ad131d4c" containerName="default-interconnect" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.315354 4791 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.319963 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-j7qj2" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.319994 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.320156 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.320269 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.320364 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.320516 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.323773 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.329796 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-lvhbs"] Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.447970 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-sasl-users\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.448037 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.448085 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/333cc33f-5158-45df-864a-f4462312e8c9-sasl-config\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.448108 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xb8n\" (UniqueName: \"kubernetes.io/projected/333cc33f-5158-45df-864a-f4462312e8c9-kube-api-access-4xb8n\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.448141 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.448170 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.448219 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: E1007 00:35:30.494179 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" podUID="3a91f4b9-8355-444b-9ba0-d4ceab19244b" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.549845 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-sasl-users\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.549914 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.549977 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/333cc33f-5158-45df-864a-f4462312e8c9-sasl-config\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.550004 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xb8n\" (UniqueName: \"kubernetes.io/projected/333cc33f-5158-45df-864a-f4462312e8c9-kube-api-access-4xb8n\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.550039 4791 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.550078 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.550130 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.551729 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/333cc33f-5158-45df-864a-f4462312e8c9-sasl-config\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.563074 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-sasl-users\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.567125 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.567868 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.571141 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.572007 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4xb8n\" (UniqueName: \"kubernetes.io/projected/333cc33f-5158-45df-864a-f4462312e8c9-kube-api-access-4xb8n\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.573420 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/333cc33f-5158-45df-864a-f4462312e8c9-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-lvhbs\" (UID: \"333cc33f-5158-45df-864a-f4462312e8c9\") " pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.686061 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.968909 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" event={"ID":"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37","Type":"ContainerStarted","Data":"309d7ef65bea9752591c0b71e08b1156be897f73ca83f7be1ca4842f3b8bb2ba"} Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.971827 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" event={"ID":"3a91f4b9-8355-444b-9ba0-d4ceab19244b","Type":"ContainerStarted","Data":"c41f96fd260eef4a64387180313c8d666b36adb6e793dbacc8956adcc8650395"} Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.974433 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" event={"ID":"82f12631-923c-4251-9fa8-ff7358158c78","Type":"ContainerStarted","Data":"af9387cafb9bc728380ea3218ac60752b463e419f5974fd1ff80be628442d1c6"} Oct 07 00:35:30 crc kubenswrapper[4791]: E1007 00:35:30.974691 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" podUID="3a91f4b9-8355-444b-9ba0-d4ceab19244b" Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.977059 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" event={"ID":"e03dece7-b0aa-4a45-a1bd-fbd6f7377006","Type":"ContainerStarted","Data":"6b426ac51bb1ba7fd7c823748861a738d617d5f607227ea49fe7e3db4154b1e7"} Oct 07 00:35:30 crc kubenswrapper[4791]: I1007 00:35:30.979677 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" event={"ID":"ffbf236c-e7ab-41a2-8083-ff71bb300966","Type":"ContainerStarted","Data":"685ce353d022b2d44ccd8acd1eacc043770a0ddef6bc26d867bcd3d6521ae278"} Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.124386 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-lvhbs"] Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.360911 4791 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-h625g" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" 
containerName="registry-server" probeResult="failure" output=< Oct 07 00:35:31 crc kubenswrapper[4791]: timeout: failed to connect service ":50051" within 1s Oct 07 00:35:31 crc kubenswrapper[4791]: > Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.989288 4791 generic.go:334] "Generic (PLEG): container finished" podID="82f12631-923c-4251-9fa8-ff7358158c78" containerID="af9387cafb9bc728380ea3218ac60752b463e419f5974fd1ff80be628442d1c6" exitCode=0 Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.989337 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" event={"ID":"82f12631-923c-4251-9fa8-ff7358158c78","Type":"ContainerDied","Data":"af9387cafb9bc728380ea3218ac60752b463e419f5974fd1ff80be628442d1c6"} Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.989380 4791 scope.go:117] "RemoveContainer" containerID="4291c42ee992ff449f8b2c4edaeb8a1a6d2312147d95e7e5d3d5730069e6e52b" Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.990021 4791 scope.go:117] "RemoveContainer" containerID="af9387cafb9bc728380ea3218ac60752b463e419f5974fd1ff80be628442d1c6" Oct 07 00:35:31 crc kubenswrapper[4791]: E1007 00:35:31.990233 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-meter-smartgateway-57948895dc-86mts_service-telemetry(82f12631-923c-4251-9fa8-ff7358158c78)\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" podUID="82f12631-923c-4251-9fa8-ff7358158c78" Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.992254 4791 generic.go:334] "Generic (PLEG): container finished" podID="e03dece7-b0aa-4a45-a1bd-fbd6f7377006" containerID="6b426ac51bb1ba7fd7c823748861a738d617d5f607227ea49fe7e3db4154b1e7" exitCode=0 Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.992303 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" event={"ID":"e03dece7-b0aa-4a45-a1bd-fbd6f7377006","Type":"ContainerDied","Data":"6b426ac51bb1ba7fd7c823748861a738d617d5f607227ea49fe7e3db4154b1e7"} Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.993041 4791 scope.go:117] "RemoveContainer" containerID="6b426ac51bb1ba7fd7c823748861a738d617d5f607227ea49fe7e3db4154b1e7" Oct 07 00:35:31 crc kubenswrapper[4791]: E1007 00:35:31.993250 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-f549b7776-ktssv_service-telemetry(e03dece7-b0aa-4a45-a1bd-fbd6f7377006)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" podUID="e03dece7-b0aa-4a45-a1bd-fbd6f7377006" Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.995669 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" event={"ID":"333cc33f-5158-45df-864a-f4462312e8c9","Type":"ContainerStarted","Data":"7fcd400b49645e58b88b80a3888fa40c858eb99c1d0fc11fdb0c1b7eab2a6c9f"} Oct 07 00:35:31 crc kubenswrapper[4791]: I1007 00:35:31.995704 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" 
event={"ID":"333cc33f-5158-45df-864a-f4462312e8c9","Type":"ContainerStarted","Data":"03a943ceaeb9aea01394b1a772c4b0ba733de09466486ffd0a95bc4e078dce91"} Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.000120 4791 generic.go:334] "Generic (PLEG): container finished" podID="ffbf236c-e7ab-41a2-8083-ff71bb300966" containerID="685ce353d022b2d44ccd8acd1eacc043770a0ddef6bc26d867bcd3d6521ae278" exitCode=0 Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.000176 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" event={"ID":"ffbf236c-e7ab-41a2-8083-ff71bb300966","Type":"ContainerDied","Data":"685ce353d022b2d44ccd8acd1eacc043770a0ddef6bc26d867bcd3d6521ae278"} Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.000661 4791 scope.go:117] "RemoveContainer" containerID="685ce353d022b2d44ccd8acd1eacc043770a0ddef6bc26d867bcd3d6521ae278" Oct 07 00:35:32 crc kubenswrapper[4791]: E1007 00:35:32.000856 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h_service-telemetry(ffbf236c-e7ab-41a2-8083-ff71bb300966)\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" podUID="ffbf236c-e7ab-41a2-8083-ff71bb300966" Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.002724 4791 generic.go:334] "Generic (PLEG): container finished" podID="62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37" containerID="309d7ef65bea9752591c0b71e08b1156be897f73ca83f7be1ca4842f3b8bb2ba" exitCode=0 Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.002774 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" event={"ID":"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37","Type":"ContainerDied","Data":"309d7ef65bea9752591c0b71e08b1156be897f73ca83f7be1ca4842f3b8bb2ba"} Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.003036 4791 scope.go:117] "RemoveContainer" containerID="309d7ef65bea9752591c0b71e08b1156be897f73ca83f7be1ca4842f3b8bb2ba" Oct 07 00:35:32 crc kubenswrapper[4791]: E1007 00:35:32.003193 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj_service-telemetry(62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37)\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" podUID="62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37" Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.005013 4791 generic.go:334] "Generic (PLEG): container finished" podID="3a91f4b9-8355-444b-9ba0-d4ceab19244b" containerID="c41f96fd260eef4a64387180313c8d666b36adb6e793dbacc8956adcc8650395" exitCode=0 Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.005039 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" event={"ID":"3a91f4b9-8355-444b-9ba0-d4ceab19244b","Type":"ContainerDied","Data":"c41f96fd260eef4a64387180313c8d666b36adb6e793dbacc8956adcc8650395"} Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.005326 4791 scope.go:117] "RemoveContainer" containerID="c41f96fd260eef4a64387180313c8d666b36adb6e793dbacc8956adcc8650395" Oct 07 00:35:32 crc kubenswrapper[4791]: E1007 00:35:32.014769 4791 
pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th_service-telemetry(3a91f4b9-8355-444b-9ba0-d4ceab19244b)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" podUID="3a91f4b9-8355-444b-9ba0-d4ceab19244b" Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.025169 4791 scope.go:117] "RemoveContainer" containerID="997df4864d14cb93e8b6645d99ae9f5a946fae7661f13cbb70c7f3f62e182b52" Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.088611 4791 scope.go:117] "RemoveContainer" containerID="0cc78c686c66cdf185ca1aa941ca1092a74600bb68f0e2c7b1d45e739e821161" Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.141828 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-lvhbs" podStartSLOduration=4.141799991 podStartE2EDuration="4.141799991s" podCreationTimestamp="2025-10-07 00:35:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 00:35:32.1334525 +0000 UTC m=+1458.729390161" watchObservedRunningTime="2025-10-07 00:35:32.141799991 +0000 UTC m=+1458.737737642" Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.142568 4791 scope.go:117] "RemoveContainer" containerID="82e08184e215d92667a902ace2862be4ef9e15ea55bf274d3b9a9776aed08a26" Oct 07 00:35:32 crc kubenswrapper[4791]: I1007 00:35:32.183840 4791 scope.go:117] "RemoveContainer" containerID="342ad85ff80cd08a6a8025ac2c81e9934bd4c792c3724ae73a88bd74b51381b2" Oct 07 00:35:40 crc kubenswrapper[4791]: I1007 00:35:40.346850 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:40 crc kubenswrapper[4791]: I1007 00:35:40.391916 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:40 crc kubenswrapper[4791]: I1007 00:35:40.577605 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h625g"] Oct 07 00:35:41 crc kubenswrapper[4791]: I1007 00:35:41.600603 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:35:41 crc kubenswrapper[4791]: I1007 00:35:41.600665 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.071466 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h625g" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerName="registry-server" containerID="cri-o://7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe" 
gracePeriod=2 Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.451125 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.525711 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-catalog-content\") pod \"cea676fd-a37a-4723-a235-212c3d2a5d32\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.525773 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lglcm\" (UniqueName: \"kubernetes.io/projected/cea676fd-a37a-4723-a235-212c3d2a5d32-kube-api-access-lglcm\") pod \"cea676fd-a37a-4723-a235-212c3d2a5d32\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.525835 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-utilities\") pod \"cea676fd-a37a-4723-a235-212c3d2a5d32\" (UID: \"cea676fd-a37a-4723-a235-212c3d2a5d32\") " Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.526777 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-utilities" (OuterVolumeSpecName: "utilities") pod "cea676fd-a37a-4723-a235-212c3d2a5d32" (UID: "cea676fd-a37a-4723-a235-212c3d2a5d32"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.527275 4791 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.531333 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cea676fd-a37a-4723-a235-212c3d2a5d32-kube-api-access-lglcm" (OuterVolumeSpecName: "kube-api-access-lglcm") pod "cea676fd-a37a-4723-a235-212c3d2a5d32" (UID: "cea676fd-a37a-4723-a235-212c3d2a5d32"). InnerVolumeSpecName "kube-api-access-lglcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.620438 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cea676fd-a37a-4723-a235-212c3d2a5d32" (UID: "cea676fd-a37a-4723-a235-212c3d2a5d32"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.628204 4791 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea676fd-a37a-4723-a235-212c3d2a5d32-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:42 crc kubenswrapper[4791]: I1007 00:35:42.628248 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lglcm\" (UniqueName: \"kubernetes.io/projected/cea676fd-a37a-4723-a235-212c3d2a5d32-kube-api-access-lglcm\") on node \"crc\" DevicePath \"\"" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.081988 4791 generic.go:334] "Generic (PLEG): container finished" podID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerID="7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe" exitCode=0 Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.082037 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h625g" event={"ID":"cea676fd-a37a-4723-a235-212c3d2a5d32","Type":"ContainerDied","Data":"7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe"} Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.082070 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h625g" event={"ID":"cea676fd-a37a-4723-a235-212c3d2a5d32","Type":"ContainerDied","Data":"b45a1df0efb9b4b70e9cb2c68b099555cc5f003a54960f02eebd48017baf97cf"} Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.082077 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h625g" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.082092 4791 scope.go:117] "RemoveContainer" containerID="7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.101489 4791 scope.go:117] "RemoveContainer" containerID="0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.118310 4791 scope.go:117] "RemoveContainer" containerID="7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.149659 4791 scope.go:117] "RemoveContainer" containerID="7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe" Oct 07 00:35:43 crc kubenswrapper[4791]: E1007 00:35:43.150023 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe\": container with ID starting with 7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe not found: ID does not exist" containerID="7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.150068 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe"} err="failed to get container status \"7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe\": rpc error: code = NotFound desc = could not find container \"7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe\": container with ID starting with 7d2e8de505f6751deddb4390090f460ef38a8b0faeab8e6da355819353f5aabe not found: ID does not exist" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.150097 4791 scope.go:117] 
"RemoveContainer" containerID="0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339" Oct 07 00:35:43 crc kubenswrapper[4791]: E1007 00:35:43.150304 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339\": container with ID starting with 0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339 not found: ID does not exist" containerID="0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.150332 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339"} err="failed to get container status \"0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339\": rpc error: code = NotFound desc = could not find container \"0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339\": container with ID starting with 0dac33ec6007e91d59c2aecdbb06aab86973a55dd0bc533b9e2eebc9d06fe339 not found: ID does not exist" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.150350 4791 scope.go:117] "RemoveContainer" containerID="7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed" Oct 07 00:35:43 crc kubenswrapper[4791]: E1007 00:35:43.150576 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed\": container with ID starting with 7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed not found: ID does not exist" containerID="7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.150597 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed"} err="failed to get container status \"7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed\": rpc error: code = NotFound desc = could not find container \"7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed\": container with ID starting with 7234d01d23a649e2e8840c7c4de389dd1c05e0da79883490995742595fcca9ed not found: ID does not exist" Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.152938 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h625g"] Oct 07 00:35:43 crc kubenswrapper[4791]: I1007 00:35:43.159561 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h625g"] Oct 07 00:35:44 crc kubenswrapper[4791]: I1007 00:35:44.079302 4791 scope.go:117] "RemoveContainer" containerID="af9387cafb9bc728380ea3218ac60752b463e419f5974fd1ff80be628442d1c6" Oct 07 00:35:44 crc kubenswrapper[4791]: I1007 00:35:44.079522 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" path="/var/lib/kubelet/pods/cea676fd-a37a-4723-a235-212c3d2a5d32/volumes" Oct 07 00:35:45 crc kubenswrapper[4791]: I1007 00:35:45.069670 4791 scope.go:117] "RemoveContainer" containerID="685ce353d022b2d44ccd8acd1eacc043770a0ddef6bc26d867bcd3d6521ae278" Oct 07 00:35:45 crc kubenswrapper[4791]: I1007 00:35:45.131721 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-86mts" 
event={"ID":"82f12631-923c-4251-9fa8-ff7358158c78","Type":"ContainerStarted","Data":"44afb996664c5e37032071ff7596c654f8a925de9cb80f06209641d9b7e663cd"} Oct 07 00:35:46 crc kubenswrapper[4791]: I1007 00:35:46.073853 4791 scope.go:117] "RemoveContainer" containerID="c41f96fd260eef4a64387180313c8d666b36adb6e793dbacc8956adcc8650395" Oct 07 00:35:46 crc kubenswrapper[4791]: I1007 00:35:46.074642 4791 scope.go:117] "RemoveContainer" containerID="309d7ef65bea9752591c0b71e08b1156be897f73ca83f7be1ca4842f3b8bb2ba" Oct 07 00:35:46 crc kubenswrapper[4791]: I1007 00:35:46.144563 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h" event={"ID":"ffbf236c-e7ab-41a2-8083-ff71bb300966","Type":"ContainerStarted","Data":"a15dd73b75885873c8d00427bff5fa8cf6b0a350fb00d81026929a6d32021934"} Oct 07 00:35:47 crc kubenswrapper[4791]: I1007 00:35:47.068616 4791 scope.go:117] "RemoveContainer" containerID="6b426ac51bb1ba7fd7c823748861a738d617d5f607227ea49fe7e3db4154b1e7" Oct 07 00:35:48 crc kubenswrapper[4791]: I1007 00:35:48.166153 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj" event={"ID":"62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37","Type":"ContainerStarted","Data":"50e25db4bb647de9b0760fdd04f531e1ded6e7463d442acd9932fe227958845a"} Oct 07 00:35:48 crc kubenswrapper[4791]: I1007 00:35:48.169000 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" event={"ID":"3a91f4b9-8355-444b-9ba0-d4ceab19244b","Type":"ContainerStarted","Data":"70aee367e16c2e64c83c2708476078d855a08dc4ecc98e0d850ff3e262416ad0"} Oct 07 00:35:50 crc kubenswrapper[4791]: I1007 00:35:50.184184 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f549b7776-ktssv" event={"ID":"e03dece7-b0aa-4a45-a1bd-fbd6f7377006","Type":"ContainerStarted","Data":"2ce2966ee2f8b47d9206559ec280b20976a6ef024c870b1d2efaac630c7682c0"} Oct 07 00:35:51 crc kubenswrapper[4791]: I1007 00:35:51.194964 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" event={"ID":"3a91f4b9-8355-444b-9ba0-d4ceab19244b","Type":"ContainerStarted","Data":"d93f1fcdfb7c7d8ea386d9bc8b578d0222f05eb3fbb910307db9f7876702e897"} Oct 07 00:35:51 crc kubenswrapper[4791]: I1007 00:35:51.216154 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th" podStartSLOduration=4.85845529 podStartE2EDuration="50.216135639s" podCreationTimestamp="2025-10-07 00:35:01 +0000 UTC" firstStartedPulling="2025-10-07 00:35:04.814327387 +0000 UTC m=+1431.410265038" lastFinishedPulling="2025-10-07 00:35:50.172007726 +0000 UTC m=+1476.767945387" observedRunningTime="2025-10-07 00:35:51.213618056 +0000 UTC m=+1477.809555707" watchObservedRunningTime="2025-10-07 00:35:51.216135639 +0000 UTC m=+1477.812073290" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.243109 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Oct 07 00:36:02 crc kubenswrapper[4791]: E1007 00:36:02.243913 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerName="extract-content" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.243927 4791 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerName="extract-content" Oct 07 00:36:02 crc kubenswrapper[4791]: E1007 00:36:02.243943 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerName="registry-server" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.243949 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerName="registry-server" Oct 07 00:36:02 crc kubenswrapper[4791]: E1007 00:36:02.243962 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerName="extract-utilities" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.243968 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerName="extract-utilities" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.244105 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="cea676fd-a37a-4723-a235-212c3d2a5d32" containerName="registry-server" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.244715 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.247869 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.248230 4791 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.255886 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.331666 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n68g\" (UniqueName: \"kubernetes.io/projected/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-kube-api-access-9n68g\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.331760 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-qdr-test-config\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.331789 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.433280 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-qdr-test-config\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.433335 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: 
\"kubernetes.io/secret/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.433425 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n68g\" (UniqueName: \"kubernetes.io/projected/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-kube-api-access-9n68g\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.434394 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-qdr-test-config\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.440048 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.451426 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n68g\" (UniqueName: \"kubernetes.io/projected/614d9c30-e7cd-48ac-8989-5f48bebe9ef8-kube-api-access-9n68g\") pod \"qdr-test\" (UID: \"614d9c30-e7cd-48ac-8989-5f48bebe9ef8\") " pod="service-telemetry/qdr-test" Oct 07 00:36:02 crc kubenswrapper[4791]: I1007 00:36:02.563035 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/qdr-test" Oct 07 00:36:03 crc kubenswrapper[4791]: W1007 00:36:03.551934 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod614d9c30_e7cd_48ac_8989_5f48bebe9ef8.slice/crio-906e30b6b9d48770c89455b27e8694467ffa9c09e11f70dedc73b85cb2c708de WatchSource:0}: Error finding container 906e30b6b9d48770c89455b27e8694467ffa9c09e11f70dedc73b85cb2c708de: Status 404 returned error can't find the container with id 906e30b6b9d48770c89455b27e8694467ffa9c09e11f70dedc73b85cb2c708de Oct 07 00:36:03 crc kubenswrapper[4791]: I1007 00:36:03.552262 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Oct 07 00:36:04 crc kubenswrapper[4791]: I1007 00:36:04.286190 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"614d9c30-e7cd-48ac-8989-5f48bebe9ef8","Type":"ContainerStarted","Data":"906e30b6b9d48770c89455b27e8694467ffa9c09e11f70dedc73b85cb2c708de"} Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.332384 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"614d9c30-e7cd-48ac-8989-5f48bebe9ef8","Type":"ContainerStarted","Data":"86fc69899f1f3d60d7704eddb60f21807127189b49b896c4133d407aa93a8581"} Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.349676 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=1.916265424 podStartE2EDuration="8.349651213s" podCreationTimestamp="2025-10-07 00:36:02 +0000 UTC" firstStartedPulling="2025-10-07 00:36:03.556986975 +0000 UTC m=+1490.152924626" lastFinishedPulling="2025-10-07 00:36:09.990372764 +0000 UTC m=+1496.586310415" observedRunningTime="2025-10-07 00:36:10.343804176 +0000 UTC m=+1496.939741827" watchObservedRunningTime="2025-10-07 00:36:10.349651213 +0000 UTC m=+1496.945588864" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.741355 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-xwzff"] Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.742761 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.745796 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.746493 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.746665 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.746949 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.746960 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.747173 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.748274 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-xwzff"] Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.860656 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-config\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.860831 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-sensubility-config\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.860883 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-publisher\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.860929 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzxvw\" (UniqueName: \"kubernetes.io/projected/35af1125-b5a2-4028-b610-3b5957d0c6e7-kube-api-access-fzxvw\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.860988 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: 
I1007 00:36:10.861083 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.861158 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-healthcheck-log\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.962481 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.962563 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-healthcheck-log\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.962608 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-config\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.962689 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-sensubility-config\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.962714 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-publisher\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.962739 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzxvw\" (UniqueName: \"kubernetes.io/projected/35af1125-b5a2-4028-b610-3b5957d0c6e7-kube-api-access-fzxvw\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.962777 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-xwzff\" (UID: 
\"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.963604 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-config\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.963658 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-sensubility-config\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.963916 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-publisher\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.964164 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.964333 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-healthcheck-log\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.964356 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:10 crc kubenswrapper[4791]: I1007 00:36:10.986528 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzxvw\" (UniqueName: \"kubernetes.io/projected/35af1125-b5a2-4028-b610-3b5957d0c6e7-kube-api-access-fzxvw\") pod \"stf-smoketest-smoke1-xwzff\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.059051 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.234496 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.236044 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.238829 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.368883 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2czf\" (UniqueName: \"kubernetes.io/projected/edce564f-f571-4bc9-8a65-fe088f32f270-kube-api-access-x2czf\") pod \"curl\" (UID: \"edce564f-f571-4bc9-8a65-fe088f32f270\") " pod="service-telemetry/curl" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.453765 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-xwzff"] Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.470387 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2czf\" (UniqueName: \"kubernetes.io/projected/edce564f-f571-4bc9-8a65-fe088f32f270-kube-api-access-x2czf\") pod \"curl\" (UID: \"edce564f-f571-4bc9-8a65-fe088f32f270\") " pod="service-telemetry/curl" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.491597 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2czf\" (UniqueName: \"kubernetes.io/projected/edce564f-f571-4bc9-8a65-fe088f32f270-kube-api-access-x2czf\") pod \"curl\" (UID: \"edce564f-f571-4bc9-8a65-fe088f32f270\") " pod="service-telemetry/curl" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.554977 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.601045 4791 patch_prober.go:28] interesting pod/machine-config-daemon-h728c container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.601107 4791 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.601151 4791 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h728c" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.601885 4791 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e"} pod="openshift-machine-config-operator/machine-config-daemon-h728c" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.601943 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerName="machine-config-daemon" containerID="cri-o://adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" gracePeriod=600 Oct 07 00:36:11 crc kubenswrapper[4791]: E1007 00:36:11.721356 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:36:11 crc kubenswrapper[4791]: I1007 00:36:11.757615 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Oct 07 00:36:11 crc kubenswrapper[4791]: W1007 00:36:11.758569 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podedce564f_f571_4bc9_8a65_fe088f32f270.slice/crio-213d55d18afac34024de0698cf4541454dd7bcf1ebb5e622f5d1fe69f01f343a WatchSource:0}: Error finding container 213d55d18afac34024de0698cf4541454dd7bcf1ebb5e622f5d1fe69f01f343a: Status 404 returned error can't find the container with id 213d55d18afac34024de0698cf4541454dd7bcf1ebb5e622f5d1fe69f01f343a Oct 07 00:36:12 crc kubenswrapper[4791]: I1007 00:36:12.349959 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xwzff" event={"ID":"35af1125-b5a2-4028-b610-3b5957d0c6e7","Type":"ContainerStarted","Data":"7bfe6c1888a140dd95e1dd854f8902e5b112d23478a0f09e1b274984f82f22cf"} Oct 07 00:36:12 crc kubenswrapper[4791]: I1007 00:36:12.351543 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"edce564f-f571-4bc9-8a65-fe088f32f270","Type":"ContainerStarted","Data":"213d55d18afac34024de0698cf4541454dd7bcf1ebb5e622f5d1fe69f01f343a"} Oct 07 00:36:12 crc kubenswrapper[4791]: I1007 00:36:12.354170 4791 generic.go:334] "Generic (PLEG): container finished" podID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" exitCode=0 Oct 07 00:36:12 crc kubenswrapper[4791]: I1007 00:36:12.354225 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerDied","Data":"adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e"} Oct 07 00:36:12 crc kubenswrapper[4791]: I1007 00:36:12.354270 4791 scope.go:117] "RemoveContainer" containerID="186c564d61df70f559e1048abd8501416d9bf37bf9acf5cdce844554cae2f448" Oct 07 00:36:12 crc kubenswrapper[4791]: I1007 00:36:12.355714 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:36:12 crc kubenswrapper[4791]: E1007 00:36:12.356238 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:36:14 crc kubenswrapper[4791]: I1007 00:36:14.385909 4791 generic.go:334] "Generic (PLEG): container finished" podID="edce564f-f571-4bc9-8a65-fe088f32f270" containerID="404edf84f3235defca9872c6be0c85fc1938f39a345152d9a9e522e0052da520" exitCode=0 Oct 07 00:36:14 crc kubenswrapper[4791]: I1007 00:36:14.385970 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" 
event={"ID":"edce564f-f571-4bc9-8a65-fe088f32f270","Type":"ContainerDied","Data":"404edf84f3235defca9872c6be0c85fc1938f39a345152d9a9e522e0052da520"} Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.012292 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.076532 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2czf\" (UniqueName: \"kubernetes.io/projected/edce564f-f571-4bc9-8a65-fe088f32f270-kube-api-access-x2czf\") pod \"edce564f-f571-4bc9-8a65-fe088f32f270\" (UID: \"edce564f-f571-4bc9-8a65-fe088f32f270\") " Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.081447 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edce564f-f571-4bc9-8a65-fe088f32f270-kube-api-access-x2czf" (OuterVolumeSpecName: "kube-api-access-x2czf") pod "edce564f-f571-4bc9-8a65-fe088f32f270" (UID: "edce564f-f571-4bc9-8a65-fe088f32f270"). InnerVolumeSpecName "kube-api-access-x2czf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.178595 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2czf\" (UniqueName: \"kubernetes.io/projected/edce564f-f571-4bc9-8a65-fe088f32f270-kube-api-access-x2czf\") on node \"crc\" DevicePath \"\"" Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.224048 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_edce564f-f571-4bc9-8a65-fe088f32f270/curl/0.log" Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.421811 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"edce564f-f571-4bc9-8a65-fe088f32f270","Type":"ContainerDied","Data":"213d55d18afac34024de0698cf4541454dd7bcf1ebb5e622f5d1fe69f01f343a"} Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.421856 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="213d55d18afac34024de0698cf4541454dd7bcf1ebb5e622f5d1fe69f01f343a" Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.421862 4791 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Oct 07 00:36:19 crc kubenswrapper[4791]: I1007 00:36:19.537968 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-kj4sf_4d373495-5026-4d85-af77-e8e11f853bf2/prometheus-webhook-snmp/0.log" Oct 07 00:36:22 crc kubenswrapper[4791]: I1007 00:36:22.443655 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xwzff" event={"ID":"35af1125-b5a2-4028-b610-3b5957d0c6e7","Type":"ContainerStarted","Data":"052792ee479bd78236c98cbe57eec092bc1e03574ead29cf0cf693498b8477cf"} Oct 07 00:36:23 crc kubenswrapper[4791]: I1007 00:36:23.069541 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:36:23 crc kubenswrapper[4791]: E1007 00:36:23.069851 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:36:30 crc kubenswrapper[4791]: I1007 00:36:30.526849 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xwzff" event={"ID":"35af1125-b5a2-4028-b610-3b5957d0c6e7","Type":"ContainerStarted","Data":"1d6c68f07670337daff8536f9dfa6b1f1a545351d1d3f6ceb91a7c6e39da1b71"} Oct 07 00:36:30 crc kubenswrapper[4791]: I1007 00:36:30.552423 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-xwzff" podStartSLOduration=2.038562522 podStartE2EDuration="20.552389521s" podCreationTimestamp="2025-10-07 00:36:10 +0000 UTC" firstStartedPulling="2025-10-07 00:36:11.460450082 +0000 UTC m=+1498.056387733" lastFinishedPulling="2025-10-07 00:36:29.974277081 +0000 UTC m=+1516.570214732" observedRunningTime="2025-10-07 00:36:30.548181651 +0000 UTC m=+1517.144119302" watchObservedRunningTime="2025-10-07 00:36:30.552389521 +0000 UTC m=+1517.148327172" Oct 07 00:36:37 crc kubenswrapper[4791]: I1007 00:36:37.068709 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:36:37 crc kubenswrapper[4791]: E1007 00:36:37.069504 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:36:49 crc kubenswrapper[4791]: I1007 00:36:49.068898 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:36:49 crc kubenswrapper[4791]: E1007 00:36:49.069705 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" 
podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:36:49 crc kubenswrapper[4791]: I1007 00:36:49.736908 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-kj4sf_4d373495-5026-4d85-af77-e8e11f853bf2/prometheus-webhook-snmp/0.log" Oct 07 00:36:55 crc kubenswrapper[4791]: I1007 00:36:55.696658 4791 generic.go:334] "Generic (PLEG): container finished" podID="35af1125-b5a2-4028-b610-3b5957d0c6e7" containerID="052792ee479bd78236c98cbe57eec092bc1e03574ead29cf0cf693498b8477cf" exitCode=0 Oct 07 00:36:55 crc kubenswrapper[4791]: I1007 00:36:55.696741 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xwzff" event={"ID":"35af1125-b5a2-4028-b610-3b5957d0c6e7","Type":"ContainerDied","Data":"052792ee479bd78236c98cbe57eec092bc1e03574ead29cf0cf693498b8477cf"} Oct 07 00:36:55 crc kubenswrapper[4791]: I1007 00:36:55.697660 4791 scope.go:117] "RemoveContainer" containerID="052792ee479bd78236c98cbe57eec092bc1e03574ead29cf0cf693498b8477cf" Oct 07 00:37:01 crc kubenswrapper[4791]: I1007 00:37:01.756790 4791 generic.go:334] "Generic (PLEG): container finished" podID="35af1125-b5a2-4028-b610-3b5957d0c6e7" containerID="1d6c68f07670337daff8536f9dfa6b1f1a545351d1d3f6ceb91a7c6e39da1b71" exitCode=0 Oct 07 00:37:01 crc kubenswrapper[4791]: I1007 00:37:01.756851 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xwzff" event={"ID":"35af1125-b5a2-4028-b610-3b5957d0c6e7","Type":"ContainerDied","Data":"1d6c68f07670337daff8536f9dfa6b1f1a545351d1d3f6ceb91a7c6e39da1b71"} Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.017692 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.069495 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:37:03 crc kubenswrapper[4791]: E1007 00:37:03.069823 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.112216 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-entrypoint-script\") pod \"35af1125-b5a2-4028-b610-3b5957d0c6e7\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.112273 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-entrypoint-script\") pod \"35af1125-b5a2-4028-b610-3b5957d0c6e7\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.112319 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-healthcheck-log\") pod 
\"35af1125-b5a2-4028-b610-3b5957d0c6e7\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.112466 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-config\") pod \"35af1125-b5a2-4028-b610-3b5957d0c6e7\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.112512 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-publisher\") pod \"35af1125-b5a2-4028-b610-3b5957d0c6e7\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.112541 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-sensubility-config\") pod \"35af1125-b5a2-4028-b610-3b5957d0c6e7\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.112570 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzxvw\" (UniqueName: \"kubernetes.io/projected/35af1125-b5a2-4028-b610-3b5957d0c6e7-kube-api-access-fzxvw\") pod \"35af1125-b5a2-4028-b610-3b5957d0c6e7\" (UID: \"35af1125-b5a2-4028-b610-3b5957d0c6e7\") " Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.139572 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35af1125-b5a2-4028-b610-3b5957d0c6e7-kube-api-access-fzxvw" (OuterVolumeSpecName: "kube-api-access-fzxvw") pod "35af1125-b5a2-4028-b610-3b5957d0c6e7" (UID: "35af1125-b5a2-4028-b610-3b5957d0c6e7"). InnerVolumeSpecName "kube-api-access-fzxvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.148654 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "35af1125-b5a2-4028-b610-3b5957d0c6e7" (UID: "35af1125-b5a2-4028-b610-3b5957d0c6e7"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.163155 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "35af1125-b5a2-4028-b610-3b5957d0c6e7" (UID: "35af1125-b5a2-4028-b610-3b5957d0c6e7"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.168169 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "35af1125-b5a2-4028-b610-3b5957d0c6e7" (UID: "35af1125-b5a2-4028-b610-3b5957d0c6e7"). InnerVolumeSpecName "collectd-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.179992 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "35af1125-b5a2-4028-b610-3b5957d0c6e7" (UID: "35af1125-b5a2-4028-b610-3b5957d0c6e7"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.188155 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "35af1125-b5a2-4028-b610-3b5957d0c6e7" (UID: "35af1125-b5a2-4028-b610-3b5957d0c6e7"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.199162 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "35af1125-b5a2-4028-b610-3b5957d0c6e7" (UID: "35af1125-b5a2-4028-b610-3b5957d0c6e7"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.214494 4791 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.214536 4791 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-sensubility-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.214548 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzxvw\" (UniqueName: \"kubernetes.io/projected/35af1125-b5a2-4028-b610-3b5957d0c6e7-kube-api-access-fzxvw\") on node \"crc\" DevicePath \"\"" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.214560 4791 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.214594 4791 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.214606 4791 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-healthcheck-log\") on node \"crc\" DevicePath \"\"" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.214617 4791 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35af1125-b5a2-4028-b610-3b5957d0c6e7-collectd-config\") on node \"crc\" DevicePath \"\"" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.789345 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/stf-smoketest-smoke1-xwzff" event={"ID":"35af1125-b5a2-4028-b610-3b5957d0c6e7","Type":"ContainerDied","Data":"7bfe6c1888a140dd95e1dd854f8902e5b112d23478a0f09e1b274984f82f22cf"} Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.789384 4791 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bfe6c1888a140dd95e1dd854f8902e5b112d23478a0f09e1b274984f82f22cf" Oct 07 00:37:03 crc kubenswrapper[4791]: I1007 00:37:03.789459 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-xwzff" Oct 07 00:37:05 crc kubenswrapper[4791]: I1007 00:37:05.318887 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-xwzff_35af1125-b5a2-4028-b610-3b5957d0c6e7/smoketest-collectd/0.log" Oct 07 00:37:05 crc kubenswrapper[4791]: I1007 00:37:05.657976 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-xwzff_35af1125-b5a2-4028-b610-3b5957d0c6e7/smoketest-ceilometer/0.log" Oct 07 00:37:06 crc kubenswrapper[4791]: I1007 00:37:06.013779 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-lvhbs_333cc33f-5158-45df-864a-f4462312e8c9/default-interconnect/0.log" Oct 07 00:37:06 crc kubenswrapper[4791]: I1007 00:37:06.350197 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th_3a91f4b9-8355-444b-9ba0-d4ceab19244b/bridge/2.log" Oct 07 00:37:06 crc kubenswrapper[4791]: I1007 00:37:06.722780 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-4m8th_3a91f4b9-8355-444b-9ba0-d4ceab19244b/sg-core/0.log" Oct 07 00:37:07 crc kubenswrapper[4791]: I1007 00:37:07.040974 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj_62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37/bridge/2.log" Oct 07 00:37:07 crc kubenswrapper[4791]: I1007 00:37:07.325028 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-54c65f8d7b-r6tpj_62d61f8f-8f11-48b3-b3d5-ecbdcdb2ba37/sg-core/0.log" Oct 07 00:37:07 crc kubenswrapper[4791]: I1007 00:37:07.606537 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-86mts_82f12631-923c-4251-9fa8-ff7358158c78/bridge/2.log" Oct 07 00:37:07 crc kubenswrapper[4791]: I1007 00:37:07.941513 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-86mts_82f12631-923c-4251-9fa8-ff7358158c78/sg-core/0.log" Oct 07 00:37:08 crc kubenswrapper[4791]: I1007 00:37:08.301187 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f549b7776-ktssv_e03dece7-b0aa-4a45-a1bd-fbd6f7377006/bridge/2.log" Oct 07 00:37:08 crc kubenswrapper[4791]: I1007 00:37:08.662299 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f549b7776-ktssv_e03dece7-b0aa-4a45-a1bd-fbd6f7377006/sg-core/0.log" Oct 07 00:37:08 crc kubenswrapper[4791]: I1007 00:37:08.991874 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h_ffbf236c-e7ab-41a2-8083-ff71bb300966/bridge/2.log" Oct 07 00:37:09 crc kubenswrapper[4791]: I1007 00:37:09.352598 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-d2k5h_ffbf236c-e7ab-41a2-8083-ff71bb300966/sg-core/0.log" Oct 07 00:37:11 crc kubenswrapper[4791]: I1007 00:37:11.943300 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-5d7865cd-khh29_b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e/operator/0.log" Oct 07 00:37:12 crc kubenswrapper[4791]: I1007 00:37:12.288996 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_ffe9e3fe-d9ab-478e-949f-0c8554bdf743/prometheus/0.log" Oct 07 00:37:12 crc kubenswrapper[4791]: I1007 00:37:12.650933 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_c523f120-41c2-4ef3-a9f4-f4b42e971c5b/elasticsearch/0.log" Oct 07 00:37:12 crc kubenswrapper[4791]: I1007 00:37:12.978934 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-kj4sf_4d373495-5026-4d85-af77-e8e11f853bf2/prometheus-webhook-snmp/0.log" Oct 07 00:37:13 crc kubenswrapper[4791]: I1007 00:37:13.314585 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_123abef3-fcea-4f90-9058-7dccb2de989a/alertmanager/0.log" Oct 07 00:37:14 crc kubenswrapper[4791]: I1007 00:37:14.087597 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:37:14 crc kubenswrapper[4791]: E1007 00:37:14.087812 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:37:27 crc kubenswrapper[4791]: I1007 00:37:27.069923 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:37:27 crc kubenswrapper[4791]: E1007 00:37:27.070934 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:37:30 crc kubenswrapper[4791]: I1007 00:37:30.726632 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-68b9c75957-c2wzb_4ce6e294-f1e5-446b-a55f-fdee99b8b961/operator/0.log" Oct 07 00:37:33 crc kubenswrapper[4791]: I1007 00:37:33.396563 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-5d7865cd-khh29_b2d6bf22-0a29-4452-a0c8-7143c5f0fd5e/operator/0.log" Oct 07 00:37:33 crc kubenswrapper[4791]: I1007 00:37:33.743870 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_qdr-test_614d9c30-e7cd-48ac-8989-5f48bebe9ef8/qdr/0.log" Oct 07 00:37:40 crc kubenswrapper[4791]: I1007 00:37:40.069348 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:37:40 crc kubenswrapper[4791]: E1007 00:37:40.070287 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:37:53 crc kubenswrapper[4791]: I1007 00:37:53.068907 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:37:53 crc kubenswrapper[4791]: E1007 00:37:53.070252 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:38:05 crc kubenswrapper[4791]: I1007 00:38:05.069263 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:38:05 crc kubenswrapper[4791]: E1007 00:38:05.069961 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.599388 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xmt44/must-gather-rsmvw"] Oct 07 00:38:12 crc kubenswrapper[4791]: E1007 00:38:12.600354 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edce564f-f571-4bc9-8a65-fe088f32f270" containerName="curl" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.600374 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="edce564f-f571-4bc9-8a65-fe088f32f270" containerName="curl" Oct 07 00:38:12 crc kubenswrapper[4791]: E1007 00:38:12.600394 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35af1125-b5a2-4028-b610-3b5957d0c6e7" containerName="smoketest-ceilometer" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.600423 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="35af1125-b5a2-4028-b610-3b5957d0c6e7" containerName="smoketest-ceilometer" Oct 07 00:38:12 crc kubenswrapper[4791]: E1007 00:38:12.600444 4791 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35af1125-b5a2-4028-b610-3b5957d0c6e7" containerName="smoketest-collectd" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.600452 4791 state_mem.go:107] "Deleted CPUSet assignment" podUID="35af1125-b5a2-4028-b610-3b5957d0c6e7" containerName="smoketest-collectd" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.600609 4791 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="edce564f-f571-4bc9-8a65-fe088f32f270" containerName="curl" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.600623 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="35af1125-b5a2-4028-b610-3b5957d0c6e7" containerName="smoketest-collectd" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.600635 4791 memory_manager.go:354] "RemoveStaleState removing state" podUID="35af1125-b5a2-4028-b610-3b5957d0c6e7" containerName="smoketest-ceilometer" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.601471 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.607090 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xmt44"/"kube-root-ca.crt" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.612048 4791 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xmt44"/"openshift-service-ca.crt" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.636536 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xmt44/must-gather-rsmvw"] Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.734296 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59acf018-260e-4729-a86c-6f3436161ebb-must-gather-output\") pod \"must-gather-rsmvw\" (UID: \"59acf018-260e-4729-a86c-6f3436161ebb\") " pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.734447 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlj8q\" (UniqueName: \"kubernetes.io/projected/59acf018-260e-4729-a86c-6f3436161ebb-kube-api-access-hlj8q\") pod \"must-gather-rsmvw\" (UID: \"59acf018-260e-4729-a86c-6f3436161ebb\") " pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.835596 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlj8q\" (UniqueName: \"kubernetes.io/projected/59acf018-260e-4729-a86c-6f3436161ebb-kube-api-access-hlj8q\") pod \"must-gather-rsmvw\" (UID: \"59acf018-260e-4729-a86c-6f3436161ebb\") " pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.835684 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59acf018-260e-4729-a86c-6f3436161ebb-must-gather-output\") pod \"must-gather-rsmvw\" (UID: \"59acf018-260e-4729-a86c-6f3436161ebb\") " pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.836239 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59acf018-260e-4729-a86c-6f3436161ebb-must-gather-output\") pod \"must-gather-rsmvw\" (UID: \"59acf018-260e-4729-a86c-6f3436161ebb\") " pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.864168 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlj8q\" (UniqueName: \"kubernetes.io/projected/59acf018-260e-4729-a86c-6f3436161ebb-kube-api-access-hlj8q\") pod \"must-gather-rsmvw\" (UID: 
\"59acf018-260e-4729-a86c-6f3436161ebb\") " pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:38:12 crc kubenswrapper[4791]: I1007 00:38:12.931034 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:38:13 crc kubenswrapper[4791]: I1007 00:38:13.343582 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xmt44/must-gather-rsmvw"] Oct 07 00:38:14 crc kubenswrapper[4791]: I1007 00:38:14.338541 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xmt44/must-gather-rsmvw" event={"ID":"59acf018-260e-4729-a86c-6f3436161ebb","Type":"ContainerStarted","Data":"178d35144ccbfdedb74d0921242cfa9c15769274a5ea6f48a60a1feb2651bd0c"} Oct 07 00:38:17 crc kubenswrapper[4791]: I1007 00:38:17.377601 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xmt44/must-gather-rsmvw" event={"ID":"59acf018-260e-4729-a86c-6f3436161ebb","Type":"ContainerStarted","Data":"cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85"} Oct 07 00:38:17 crc kubenswrapper[4791]: I1007 00:38:17.378214 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xmt44/must-gather-rsmvw" event={"ID":"59acf018-260e-4729-a86c-6f3436161ebb","Type":"ContainerStarted","Data":"aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872"} Oct 07 00:38:18 crc kubenswrapper[4791]: I1007 00:38:18.069529 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:38:18 crc kubenswrapper[4791]: E1007 00:38:18.069729 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:38:18 crc kubenswrapper[4791]: I1007 00:38:18.792195 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xmt44/must-gather-rsmvw" podStartSLOduration=3.220855757 podStartE2EDuration="6.792172432s" podCreationTimestamp="2025-10-07 00:38:12 +0000 UTC" firstStartedPulling="2025-10-07 00:38:13.356055545 +0000 UTC m=+1619.951993196" lastFinishedPulling="2025-10-07 00:38:16.92737222 +0000 UTC m=+1623.523309871" observedRunningTime="2025-10-07 00:38:17.395975567 +0000 UTC m=+1623.991913248" watchObservedRunningTime="2025-10-07 00:38:18.792172432 +0000 UTC m=+1625.388110083" Oct 07 00:38:18 crc kubenswrapper[4791]: I1007 00:38:18.794009 4791 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-86k8x"] Oct 07 00:38:18 crc kubenswrapper[4791]: I1007 00:38:18.794973 4791 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:18 crc kubenswrapper[4791]: I1007 00:38:18.807579 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-86k8x"] Oct 07 00:38:18 crc kubenswrapper[4791]: I1007 00:38:18.925245 4791 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2bp6\" (UniqueName: \"kubernetes.io/projected/e573ed31-b7c6-4c1f-acce-769e2d2a8ecd-kube-api-access-k2bp6\") pod \"infrawatch-operators-86k8x\" (UID: \"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd\") " pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:19 crc kubenswrapper[4791]: I1007 00:38:19.026208 4791 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2bp6\" (UniqueName: \"kubernetes.io/projected/e573ed31-b7c6-4c1f-acce-769e2d2a8ecd-kube-api-access-k2bp6\") pod \"infrawatch-operators-86k8x\" (UID: \"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd\") " pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:19 crc kubenswrapper[4791]: I1007 00:38:19.054026 4791 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2bp6\" (UniqueName: \"kubernetes.io/projected/e573ed31-b7c6-4c1f-acce-769e2d2a8ecd-kube-api-access-k2bp6\") pod \"infrawatch-operators-86k8x\" (UID: \"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd\") " pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:19 crc kubenswrapper[4791]: I1007 00:38:19.110765 4791 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:19 crc kubenswrapper[4791]: I1007 00:38:19.541023 4791 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-86k8x"] Oct 07 00:38:19 crc kubenswrapper[4791]: W1007 00:38:19.553293 4791 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode573ed31_b7c6_4c1f_acce_769e2d2a8ecd.slice/crio-47f6ebed4cff6ca4f3ea2fdf92aa5f0c2350b694f211e6207400d5f7a8ce7529 WatchSource:0}: Error finding container 47f6ebed4cff6ca4f3ea2fdf92aa5f0c2350b694f211e6207400d5f7a8ce7529: Status 404 returned error can't find the container with id 47f6ebed4cff6ca4f3ea2fdf92aa5f0c2350b694f211e6207400d5f7a8ce7529 Oct 07 00:38:19 crc kubenswrapper[4791]: I1007 00:38:19.555741 4791 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 00:38:20 crc kubenswrapper[4791]: I1007 00:38:20.399553 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-86k8x" event={"ID":"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd","Type":"ContainerStarted","Data":"c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93"} Oct 07 00:38:20 crc kubenswrapper[4791]: I1007 00:38:20.399596 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-86k8x" event={"ID":"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd","Type":"ContainerStarted","Data":"47f6ebed4cff6ca4f3ea2fdf92aa5f0c2350b694f211e6207400d5f7a8ce7529"} Oct 07 00:38:20 crc kubenswrapper[4791]: I1007 00:38:20.414760 4791 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-86k8x" podStartSLOduration=2.322606737 podStartE2EDuration="2.414738713s" podCreationTimestamp="2025-10-07 00:38:18 +0000 UTC" firstStartedPulling="2025-10-07 00:38:19.555499111 +0000 UTC 
m=+1626.151436762" lastFinishedPulling="2025-10-07 00:38:19.647631087 +0000 UTC m=+1626.243568738" observedRunningTime="2025-10-07 00:38:20.410991646 +0000 UTC m=+1627.006929297" watchObservedRunningTime="2025-10-07 00:38:20.414738713 +0000 UTC m=+1627.010676364" Oct 07 00:38:28 crc kubenswrapper[4791]: I1007 00:38:28.492804 4791 scope.go:117] "RemoveContainer" containerID="2498ce595b0b891992c65424fa19fdc78dd589db585bbb1de2fdc50a2a1bae61" Oct 07 00:38:28 crc kubenswrapper[4791]: I1007 00:38:28.528823 4791 scope.go:117] "RemoveContainer" containerID="047d0ece5b78eb3627ceec5b6488fbd81d7f26da9e9555a94e57c4a1231f0cca" Oct 07 00:38:28 crc kubenswrapper[4791]: I1007 00:38:28.623273 4791 scope.go:117] "RemoveContainer" containerID="153503d16ea3f3f23e80bcbb95564360ba5f8d1d798ba4658aea9e7e7d146d24" Oct 07 00:38:28 crc kubenswrapper[4791]: I1007 00:38:28.656159 4791 scope.go:117] "RemoveContainer" containerID="d1bb4f8382376ee4ba1579aed38fb3a4f8a87ad2a1ef5f6be9b901b04d81ade3" Oct 07 00:38:29 crc kubenswrapper[4791]: I1007 00:38:29.111842 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:29 crc kubenswrapper[4791]: I1007 00:38:29.111930 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:29 crc kubenswrapper[4791]: I1007 00:38:29.154897 4791 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:29 crc kubenswrapper[4791]: I1007 00:38:29.504332 4791 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:29 crc kubenswrapper[4791]: I1007 00:38:29.554050 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-86k8x"] Oct 07 00:38:31 crc kubenswrapper[4791]: I1007 00:38:31.485943 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-86k8x" podUID="e573ed31-b7c6-4c1f-acce-769e2d2a8ecd" containerName="registry-server" containerID="cri-o://c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93" gracePeriod=2 Oct 07 00:38:31 crc kubenswrapper[4791]: I1007 00:38:31.855415 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.021606 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2bp6\" (UniqueName: \"kubernetes.io/projected/e573ed31-b7c6-4c1f-acce-769e2d2a8ecd-kube-api-access-k2bp6\") pod \"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd\" (UID: \"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd\") " Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.028666 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e573ed31-b7c6-4c1f-acce-769e2d2a8ecd-kube-api-access-k2bp6" (OuterVolumeSpecName: "kube-api-access-k2bp6") pod "e573ed31-b7c6-4c1f-acce-769e2d2a8ecd" (UID: "e573ed31-b7c6-4c1f-acce-769e2d2a8ecd"). InnerVolumeSpecName "kube-api-access-k2bp6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.123994 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2bp6\" (UniqueName: \"kubernetes.io/projected/e573ed31-b7c6-4c1f-acce-769e2d2a8ecd-kube-api-access-k2bp6\") on node \"crc\" DevicePath \"\"" Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.495677 4791 generic.go:334] "Generic (PLEG): container finished" podID="e573ed31-b7c6-4c1f-acce-769e2d2a8ecd" containerID="c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93" exitCode=0 Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.495730 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-86k8x" event={"ID":"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd","Type":"ContainerDied","Data":"c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93"} Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.495763 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-86k8x" event={"ID":"e573ed31-b7c6-4c1f-acce-769e2d2a8ecd","Type":"ContainerDied","Data":"47f6ebed4cff6ca4f3ea2fdf92aa5f0c2350b694f211e6207400d5f7a8ce7529"} Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.495784 4791 scope.go:117] "RemoveContainer" containerID="c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93" Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.495928 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-86k8x" Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.512878 4791 scope.go:117] "RemoveContainer" containerID="c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93" Oct 07 00:38:32 crc kubenswrapper[4791]: E1007 00:38:32.513366 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93\": container with ID starting with c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93 not found: ID does not exist" containerID="c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93" Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.513435 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93"} err="failed to get container status \"c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93\": rpc error: code = NotFound desc = could not find container \"c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93\": container with ID starting with c8a2df6880bbb7ea8a88d0606bae2a21b5b5f3077ba24218c6450e630cca3a93 not found: ID does not exist" Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.515243 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-86k8x"] Oct 07 00:38:32 crc kubenswrapper[4791]: I1007 00:38:32.520297 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-86k8x"] Oct 07 00:38:33 crc kubenswrapper[4791]: I1007 00:38:33.068780 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:38:33 crc kubenswrapper[4791]: E1007 00:38:33.069420 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:38:34 crc kubenswrapper[4791]: I1007 00:38:34.100554 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e573ed31-b7c6-4c1f-acce-769e2d2a8ecd" path="/var/lib/kubelet/pods/e573ed31-b7c6-4c1f-acce-769e2d2a8ecd/volumes" Oct 07 00:38:46 crc kubenswrapper[4791]: I1007 00:38:46.069460 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:38:46 crc kubenswrapper[4791]: E1007 00:38:46.070230 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:38:56 crc kubenswrapper[4791]: I1007 00:38:56.613157 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5k9gm_d8e13d63-c1dd-4a68-bd18-b65592799f10/control-plane-machine-set-operator/0.log" Oct 07 00:38:56 crc kubenswrapper[4791]: I1007 00:38:56.703092 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-frzvc_4ebf8d1c-8509-4eda-9f7a-d034c0cb7500/kube-rbac-proxy/0.log" Oct 07 00:38:56 crc kubenswrapper[4791]: I1007 00:38:56.764124 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-frzvc_4ebf8d1c-8509-4eda-9f7a-d034c0cb7500/machine-api-operator/0.log" Oct 07 00:38:57 crc kubenswrapper[4791]: I1007 00:38:57.068908 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:38:57 crc kubenswrapper[4791]: E1007 00:38:57.069135 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:39:07 crc kubenswrapper[4791]: I1007 00:39:07.471879 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-7mggb_a48eeb9a-a63d-45e3-b3ae-dae3aeedb6ce/cert-manager-controller/0.log" Oct 07 00:39:07 crc kubenswrapper[4791]: I1007 00:39:07.632671 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-lrnqq_ba95eb3a-f7cd-4e9d-be3c-4e1dd78d8fae/cert-manager-webhook/0.log" Oct 07 00:39:07 crc kubenswrapper[4791]: I1007 00:39:07.634048 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-x797z_01fdf850-6e4b-46f1-9a87-651a40d459fc/cert-manager-cainjector/0.log" Oct 07 00:39:09 crc kubenswrapper[4791]: I1007 00:39:09.069235 4791 scope.go:117] "RemoveContainer" 
containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:39:09 crc kubenswrapper[4791]: E1007 00:39:09.069628 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:39:21 crc kubenswrapper[4791]: I1007 00:39:21.465220 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj_637b38c4-c723-4e98-afd3-897f73d13259/util/0.log" Oct 07 00:39:21 crc kubenswrapper[4791]: I1007 00:39:21.627252 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj_637b38c4-c723-4e98-afd3-897f73d13259/util/0.log" Oct 07 00:39:21 crc kubenswrapper[4791]: I1007 00:39:21.648097 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj_637b38c4-c723-4e98-afd3-897f73d13259/pull/0.log" Oct 07 00:39:21 crc kubenswrapper[4791]: I1007 00:39:21.695084 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj_637b38c4-c723-4e98-afd3-897f73d13259/pull/0.log" Oct 07 00:39:21 crc kubenswrapper[4791]: I1007 00:39:21.817084 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj_637b38c4-c723-4e98-afd3-897f73d13259/extract/0.log" Oct 07 00:39:21 crc kubenswrapper[4791]: I1007 00:39:21.827666 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj_637b38c4-c723-4e98-afd3-897f73d13259/util/0.log" Oct 07 00:39:21 crc kubenswrapper[4791]: I1007 00:39:21.866917 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb697bkhj_637b38c4-c723-4e98-afd3-897f73d13259/pull/0.log" Oct 07 00:39:21 crc kubenswrapper[4791]: I1007 00:39:21.997297 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj_385c48b7-e194-421e-a73f-f214f2666ed7/util/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.155004 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj_385c48b7-e194-421e-a73f-f214f2666ed7/pull/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.155581 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj_385c48b7-e194-421e-a73f-f214f2666ed7/util/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.199780 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj_385c48b7-e194-421e-a73f-f214f2666ed7/pull/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.367780 4791 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj_385c48b7-e194-421e-a73f-f214f2666ed7/extract/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.377311 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj_385c48b7-e194-421e-a73f-f214f2666ed7/pull/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.397806 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fc6lcj_385c48b7-e194-421e-a73f-f214f2666ed7/util/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.549984 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9_4e7a4cbd-842a-41b7-95f0-934349423df1/util/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.815140 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9_4e7a4cbd-842a-41b7-95f0-934349423df1/util/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.834177 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9_4e7a4cbd-842a-41b7-95f0-934349423df1/pull/0.log" Oct 07 00:39:22 crc kubenswrapper[4791]: I1007 00:39:22.841660 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9_4e7a4cbd-842a-41b7-95f0-934349423df1/pull/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.008941 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9_4e7a4cbd-842a-41b7-95f0-934349423df1/pull/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.010481 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9_4e7a4cbd-842a-41b7-95f0-934349423df1/extract/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.019936 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2ddf2d9_4e7a4cbd-842a-41b7-95f0-934349423df1/util/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.180811 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m6x2v_f2100e2e-ec37-42cc-9e3e-d3bc94f7afec/extract-utilities/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.328270 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m6x2v_f2100e2e-ec37-42cc-9e3e-d3bc94f7afec/extract-content/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.332368 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m6x2v_f2100e2e-ec37-42cc-9e3e-d3bc94f7afec/extract-utilities/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.376910 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m6x2v_f2100e2e-ec37-42cc-9e3e-d3bc94f7afec/extract-content/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.486944 4791 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m6x2v_f2100e2e-ec37-42cc-9e3e-d3bc94f7afec/extract-utilities/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.513161 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m6x2v_f2100e2e-ec37-42cc-9e3e-d3bc94f7afec/extract-content/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.663649 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nlxvp_4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c/extract-utilities/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.770109 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m6x2v_f2100e2e-ec37-42cc-9e3e-d3bc94f7afec/registry-server/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.866958 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nlxvp_4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c/extract-content/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.868776 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nlxvp_4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c/extract-utilities/0.log" Oct 07 00:39:23 crc kubenswrapper[4791]: I1007 00:39:23.911045 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nlxvp_4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c/extract-content/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.045560 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nlxvp_4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c/extract-utilities/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.062900 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nlxvp_4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c/extract-content/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.072922 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:39:24 crc kubenswrapper[4791]: E1007 00:39:24.073198 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.224474 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x_3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2/util/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.432230 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nlxvp_4cc8cf38-a2c7-477d-81f6-d5f5a8130e1c/registry-server/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.463295 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x_3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2/pull/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.471395 4791 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x_3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2/pull/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.518087 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x_3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2/util/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.643842 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x_3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2/pull/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.656612 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x_3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2/util/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.673939 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927lpv6x_3293c35c-f4ba-4d9c-9f1a-5cf9620f7dd2/extract/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.726797 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-s4l9q_9c998b5e-063d-4cb7-8eeb-f479d11a11ba/marketplace-operator/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.818394 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-klxwq_034480a5-7c4f-48ee-86ad-d358e746e74b/extract-utilities/0.log" Oct 07 00:39:24 crc kubenswrapper[4791]: I1007 00:39:24.987271 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-klxwq_034480a5-7c4f-48ee-86ad-d358e746e74b/extract-utilities/0.log" Oct 07 00:39:25 crc kubenswrapper[4791]: I1007 00:39:25.015487 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-klxwq_034480a5-7c4f-48ee-86ad-d358e746e74b/extract-content/0.log" Oct 07 00:39:25 crc kubenswrapper[4791]: I1007 00:39:25.015582 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-klxwq_034480a5-7c4f-48ee-86ad-d358e746e74b/extract-content/0.log" Oct 07 00:39:25 crc kubenswrapper[4791]: I1007 00:39:25.133894 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-klxwq_034480a5-7c4f-48ee-86ad-d358e746e74b/extract-content/0.log" Oct 07 00:39:25 crc kubenswrapper[4791]: I1007 00:39:25.145471 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-klxwq_034480a5-7c4f-48ee-86ad-d358e746e74b/extract-utilities/0.log" Oct 07 00:39:25 crc kubenswrapper[4791]: I1007 00:39:25.463975 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-klxwq_034480a5-7c4f-48ee-86ad-d358e746e74b/registry-server/0.log" Oct 07 00:39:35 crc kubenswrapper[4791]: I1007 00:39:35.290228 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-7n58g_f3a541f9-9f16-46d8-bf15-61223084be30/prometheus-operator/0.log" Oct 07 00:39:35 crc kubenswrapper[4791]: I1007 00:39:35.415454 4791 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7dcbffb685-bk7qd_d71a9a4d-ce1e-4b77-943c-33bdf244ddc5/prometheus-operator-admission-webhook/0.log" Oct 07 00:39:35 crc kubenswrapper[4791]: I1007 00:39:35.451320 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7dcbffb685-s5ncr_5a672a12-8d07-4ce4-a94e-b3e66473f35c/prometheus-operator-admission-webhook/0.log" Oct 07 00:39:35 crc kubenswrapper[4791]: I1007 00:39:35.565782 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-bcfdp_15fb2d33-1abb-4e8a-bef7-4b6d2a643ea6/operator/0.log" Oct 07 00:39:35 crc kubenswrapper[4791]: I1007 00:39:35.633390 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-q5556_84d5cca0-920e-4cc6-ae49-a848e3255ab7/perses-operator/0.log" Oct 07 00:39:36 crc kubenswrapper[4791]: I1007 00:39:36.069208 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:39:36 crc kubenswrapper[4791]: E1007 00:39:36.069436 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:39:48 crc kubenswrapper[4791]: I1007 00:39:48.068833 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:39:48 crc kubenswrapper[4791]: E1007 00:39:48.069845 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:40:02 crc kubenswrapper[4791]: I1007 00:40:02.070351 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:40:02 crc kubenswrapper[4791]: E1007 00:40:02.071734 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:40:14 crc kubenswrapper[4791]: I1007 00:40:14.073479 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:40:14 crc kubenswrapper[4791]: E1007 00:40:14.074571 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:40:18 crc kubenswrapper[4791]: I1007 00:40:18.336975 4791 generic.go:334] "Generic (PLEG): container finished" podID="59acf018-260e-4729-a86c-6f3436161ebb" containerID="aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872" exitCode=0 Oct 07 00:40:18 crc kubenswrapper[4791]: I1007 00:40:18.337041 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xmt44/must-gather-rsmvw" event={"ID":"59acf018-260e-4729-a86c-6f3436161ebb","Type":"ContainerDied","Data":"aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872"} Oct 07 00:40:18 crc kubenswrapper[4791]: I1007 00:40:18.337928 4791 scope.go:117] "RemoveContainer" containerID="aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872" Oct 07 00:40:18 crc kubenswrapper[4791]: I1007 00:40:18.787833 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xmt44_must-gather-rsmvw_59acf018-260e-4729-a86c-6f3436161ebb/gather/0.log" Oct 07 00:40:25 crc kubenswrapper[4791]: I1007 00:40:25.734556 4791 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xmt44/must-gather-rsmvw"] Oct 07 00:40:25 crc kubenswrapper[4791]: I1007 00:40:25.735634 4791 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-xmt44/must-gather-rsmvw" podUID="59acf018-260e-4729-a86c-6f3436161ebb" containerName="copy" containerID="cri-o://cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85" gracePeriod=2 Oct 07 00:40:25 crc kubenswrapper[4791]: I1007 00:40:25.739608 4791 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xmt44/must-gather-rsmvw"] Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.181867 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xmt44_must-gather-rsmvw_59acf018-260e-4729-a86c-6f3436161ebb/copy/0.log" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.182794 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.378884 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59acf018-260e-4729-a86c-6f3436161ebb-must-gather-output\") pod \"59acf018-260e-4729-a86c-6f3436161ebb\" (UID: \"59acf018-260e-4729-a86c-6f3436161ebb\") " Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.379009 4791 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlj8q\" (UniqueName: \"kubernetes.io/projected/59acf018-260e-4729-a86c-6f3436161ebb-kube-api-access-hlj8q\") pod \"59acf018-260e-4729-a86c-6f3436161ebb\" (UID: \"59acf018-260e-4729-a86c-6f3436161ebb\") " Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.385959 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59acf018-260e-4729-a86c-6f3436161ebb-kube-api-access-hlj8q" (OuterVolumeSpecName: "kube-api-access-hlj8q") pod "59acf018-260e-4729-a86c-6f3436161ebb" (UID: "59acf018-260e-4729-a86c-6f3436161ebb"). InnerVolumeSpecName "kube-api-access-hlj8q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.410067 4791 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xmt44_must-gather-rsmvw_59acf018-260e-4729-a86c-6f3436161ebb/copy/0.log" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.410370 4791 generic.go:334] "Generic (PLEG): container finished" podID="59acf018-260e-4729-a86c-6f3436161ebb" containerID="cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85" exitCode=143 Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.410434 4791 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xmt44/must-gather-rsmvw" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.410450 4791 scope.go:117] "RemoveContainer" containerID="cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.422472 4791 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59acf018-260e-4729-a86c-6f3436161ebb-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "59acf018-260e-4729-a86c-6f3436161ebb" (UID: "59acf018-260e-4729-a86c-6f3436161ebb"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.426066 4791 scope.go:117] "RemoveContainer" containerID="aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.455912 4791 scope.go:117] "RemoveContainer" containerID="cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85" Oct 07 00:40:26 crc kubenswrapper[4791]: E1007 00:40:26.456423 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85\": container with ID starting with cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85 not found: ID does not exist" containerID="cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.456471 4791 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85"} err="failed to get container status \"cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85\": rpc error: code = NotFound desc = could not find container \"cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85\": container with ID starting with cdea6fd5fe9476a9d0f95353323378365a66477fb083aa6c815f15c465846b85 not found: ID does not exist" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.456498 4791 scope.go:117] "RemoveContainer" containerID="aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872" Oct 07 00:40:26 crc kubenswrapper[4791]: E1007 00:40:26.456942 4791 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872\": container with ID starting with aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872 not found: ID does not exist" containerID="aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.456972 4791 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872"} err="failed to get container status \"aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872\": rpc error: code = NotFound desc = could not find container \"aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872\": container with ID starting with aeefbf7e70fac1cb4577c9c8d2de9a4362631d02849a68a5afe35551aa440872 not found: ID does not exist" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.480833 4791 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59acf018-260e-4729-a86c-6f3436161ebb-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 07 00:40:26 crc kubenswrapper[4791]: I1007 00:40:26.480865 4791 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlj8q\" (UniqueName: \"kubernetes.io/projected/59acf018-260e-4729-a86c-6f3436161ebb-kube-api-access-hlj8q\") on node \"crc\" DevicePath \"\"" Oct 07 00:40:28 crc kubenswrapper[4791]: I1007 00:40:28.069128 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:40:28 crc kubenswrapper[4791]: E1007 00:40:28.069455 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:40:28 crc kubenswrapper[4791]: I1007 00:40:28.079859 4791 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59acf018-260e-4729-a86c-6f3436161ebb" path="/var/lib/kubelet/pods/59acf018-260e-4729-a86c-6f3436161ebb/volumes" Oct 07 00:40:42 crc kubenswrapper[4791]: I1007 00:40:42.069541 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:40:42 crc kubenswrapper[4791]: E1007 00:40:42.070554 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:40:56 crc kubenswrapper[4791]: I1007 00:40:56.070053 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:40:56 crc kubenswrapper[4791]: E1007 00:40:56.073725 4791 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:41:10 crc kubenswrapper[4791]: I1007 00:41:10.069293 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:41:10 crc kubenswrapper[4791]: E1007 00:41:10.070526 4791 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h728c_openshift-machine-config-operator(ae232b81-12ca-4baa-ad86-96f3fbd32ac9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h728c" podUID="ae232b81-12ca-4baa-ad86-96f3fbd32ac9" Oct 07 00:41:25 crc kubenswrapper[4791]: I1007 00:41:25.070578 4791 scope.go:117] "RemoveContainer" containerID="adab8a3df516c249b0efc3f19cceb00a3a50917df54a4b5c0be2b92ff931cd4e" Oct 07 00:41:25 crc kubenswrapper[4791]: I1007 00:41:25.842956 4791 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h728c" event={"ID":"ae232b81-12ca-4baa-ad86-96f3fbd32ac9","Type":"ContainerStarted","Data":"1dacf5d31b32766c7c64116d643aa81fe33640a050bfc0b689d92dbd1799b59d"}